branch_name stringclasses 149 values | text stringlengths 23 89.3M | directory_id stringlengths 40 40 | languages listlengths 1 19 | num_files int64 1 11.8k | repo_language stringclasses 38 values | repo_name stringlengths 6 114 | revision_id stringlengths 40 40 | snapshot_id stringlengths 40 40 |
|---|---|---|---|---|---|---|---|---|
refs/heads/master | <repo_name>MariaDelgado10/DelgadoMaria_hw2<file_sep>/Fourier.py
import numpy as np
import matplotlib.pylab as plt
from matplotlib import mlab
# Load both sampled records: column 0 is time, column 1 is amplitude.
signal= np.genfromtxt("signal.dat")
signalsuma=np.genfromtxt("signalSuma.dat")
signal_x=np.genfromtxt("signal.dat")[:,0]
#print(signal_x)
signal_y=np.genfromtxt("signal.dat")[:,1]
#print(signal_y)
suma_x=np.genfromtxt("signalSuma.dat")[:,0]
suma_y=np.genfromtxt("signalSuma.dat")[:,1]
# Plot the two raw signals side by side and save the figure.
plt.figure(figsize=(10,5))
plt.subplot(2,2,1)
plt.plot(signal_x,signal_y)
plt.title("signal")
plt.subplot(2,2,2)
plt.plot(suma_x,suma_y)
plt.title("signalSuma")
plt.savefig("Señales.png")
plt.show()
# Discrete Fourier transform of signal.dat
N=(len(signal))  # sample count, reused by the np.fft calls below
def fourier(signal):
    """Compute the discrete Fourier transform of a sampled signal.

    Parameters
    ----------
    signal : ndarray of shape (N, 2)
        Column 0 is (uniformly spaced) time, column 1 is amplitude.

    Returns
    -------
    (frecuencias, val_transfreal, val_transimg)
        Frequency axis (same ordering as np.fft.fftfreq) and the real and
        imaginary parts of the DFT coefficients.
    """
    y = signal[:, 1]
    x = signal[:, 0]
    N = len(y)
    n = np.arange(N)
    # Fix: the old code wrote into an undefined name `val_trans` (NameError)
    # and the arrays were created with length 0.
    val_transfreal = np.zeros(N)
    val_transimg = np.zeros(N)
    for k in range(N):
        # Fix: X[k] = sum_n y[n]*exp(-2j*pi*k*n/N); the old code multiplied
        # the constant y[k] by the sum of exponentials.
        coef = np.sum(y * np.exp(-2j * np.pi * k * n / N))
        val_transfreal[k] = coef.real
        val_transimg[k] = coef.imag
    dtiempo = x[1] - x[0]
    # Fix: bin k corresponds to frequency k/(N*dt) (as np.fft.fftfreq does);
    # the old code multiplied by dt instead.
    frecuencias = np.zeros(N)
    for i in range(N // 2):
        frecuencias[i] = i / (N * dtiempo)
    for i in range(-1, -(N // 2) - 1, -1):
        frecuencias[i] = i / (N * dtiempo)
    return frecuencias, val_transfreal, val_transimg
#return(N)
#print(len(val_transf))
#plt.plot(fourier(signal))
#plt.savefig("Fourierpropia_trans.png")
# FFT of both signals with numpy, then spectrograms of each.
transformada=np.fft.fft(signal_y,N,norm=None)
transformada2=np.fft.fft(suma_y,N,norm=None)
# Sampling period of each record (time columns are uniformly spaced).
dtiempo=signal_x[1]-signal_x[0]
dtiempo2=suma_x[1]-suma_x[0]
frecuencias=np.fft.fftfreq(N, d=dtiempo)
frecuencias2=np.fft.fftfreq(N, d=dtiempo2)
print(frecuencias)
plt.figure(figsize=(10,5))
plt.subplot(2,2,1)
# NOTE(review): plotting a complex array silently drops the imaginary part;
# np.abs(transformada) is probably intended — confirm.
plt.plot(frecuencias,transformada)
plt.xlabel("frecuencias")
plt.ylabel("transformada")
plt.title("transformada signal con fft")
plt.subplot(2,2,2)
plt.plot(frecuencias2,transformada2)
plt.xlabel("frecuencias")
plt.ylabel("transformada")
plt.title("transformada signalsuma con fft")
plt.savefig("Fourier_trans.png")
plt.figure(figsize=(10,5))
plt.subplot(2,2,1)
plt.specgram(signal_y, NFFT=256, Fs=1/dtiempo) # careful: Fs is 1/dt
plt.xlabel("tiempo")
plt.ylabel("frecuencias")
plt.title("espectograma de la señal signal")
plt.subplot(2,2,2)
plt.specgram(suma_y, NFFT=256, Fs=1/dtiempo2)
plt.title("espectograma de la señal signalsuma")
plt.xlabel("tiempo")
plt.ylabel("frecuencias")
plt.savefig("espectograma2señales.png")
# --- Seismic record (temblor.txt): time plot, spectrum, spectrogram --------
temblor=np.genfromtxt("temblor.txt", skip_header=4)
plt.figure()
plt.plot(temblor)
plt.xlabel("tiempo")
plt.ylabel("señal sismica")
plt.title("señal sismica vs tiempo")
plt.savefig("temblor.png")
# Fix: use the seismic record's own length for the FFT; the old code reused
# N (the length of signal.dat), truncating or zero-padding the spectrum.
# The dead duplicate assignment to `transformada` was removed as well.
N_temblor = len(temblor)
transformada_temblor = np.fft.fft(temblor, N_temblor, norm=None)
# Fix: temblor.txt holds amplitudes only, so temblor[1]-temblor[0] was an
# amplitude difference, not a sampling period.  The spectrogram below uses
# Fs=512 Hz, hence the sampling period is 1/512 s.
dtiempo_temblor = 1.0 / 512.0
frecuencias_temblor = np.fft.fftfreq(N_temblor, d=dtiempo_temblor)
plt.figure()
plt.plot(frecuencias_temblor, transformada_temblor)
plt.xlabel("frecuencia ")
plt.ylabel("transformada ")
plt.title("tranformada de fourier del temblor")
plt.savefig("transfouriertemblor.png")
plt.figure()
plt.specgram(temblor, NFFT=256, Fs=512)
plt.title("espectograma de la señal sismica")
plt.xlabel("tiempo")
plt.ylabel("frecuencias")
plt.savefig("espectogramatemblor.png")
import numpy as np
import matplotlib.pylab as plt
#w=np.genfromtxt("w.dat")
#u=np.genfromtxt("edificio.dat")
# Frequency sweep grid matching the C++ simulation range.
w=np.linspace( 0.2*np.sqrt(2), 3.0*np.sqrt(2),100)
# Each casoN.dat has columns: u1 u2 u3 t w (written by edificio.cpp).
caso1=np.genfromtxt("caso1.dat")
caso2=np.genfromtxt("caso2.dat")
caso3=np.genfromtxt("caso3.dat")
caso4=np.genfromtxt("caso4.dat")
# Plot first-storey displacement (column 0) against time (column 3).
plt.figure()
plt.title("omega1 0.707")
plt.xlabel("t")
plt.ylabel("u")
plt.plot(caso1[:,3],caso1[:,0])
plt.savefig("omega1.png")
plt.figure()
plt.title("omega2 1.41")
plt.xlabel("t")
plt.ylabel("u")
plt.plot(caso2[:,3],caso2[:,0])
plt.savefig("omega2.png")
plt.figure()
plt.title("omega3 2.12")
plt.xlabel("t")
plt.ylabel("u")
plt.plot(caso3[:,3],caso3[:,0], label ="u3")
plt.savefig("omega3.png")
plt.figure()
plt.title("omega4 3.5")
plt.plot(caso4[:,3],caso4[:,0])
plt.xlabel("t")
plt.ylabel("u")
# Fix: this figure previously saved to "omega3.png", overwriting the
# caso3 plot above.
plt.savefig("omega4.png")
<file_sep>/hw2.mk
# Final report: rebuilt when the LaTeX source or any generated figure changes.
Resultados_hw2.pdf : Resultados_hw2.tex Señales.pdf Fourier_trans.pdf espectograma2señales.pdf temblor.pdf transfouriertemblor.pdf espectogramatemblor.pdf
	pdflatex Resultados_hw2.tex
# Every figure below is produced by the same Fourier.py run.
Señales.pdf : Fourier.py signal.dat signalSuma.dat
	python Fourier.py
Fourier_trans.pdf : Fourier.py signal.dat signalSuma.dat
	python Fourier.py
espectograma2señales.pdf : Fourier.py signal.dat signalSuma.dat
	python Fourier.py
# NOTE(review): this target is declared as .png while Resultados_hw2.pdf
# depends on temblor.pdf above — confirm the intended extension.
temblor.png : Fourier.py temblor.txt
	python Fourier.py
transfouriertemblor.pdf : Fourier.py temblor.txt
	python Fourier.py
espectogramatemblor.pdf : Fourier.py temblor.txt
	python Fourier.py
<file_sep>/edificio.cpp
#include<iostream>
#include<cmath>
#include<stdlib.h>
#include<time.h>
#include <fstream>
using namespace std;
// Building model parameters: storey mass, inter-storey stiffness, damping.
double m =1000.0;
double k=2000.0;
double gama= 0.0;   // damping coefficient (disabled)
// Default driving frequency: 1x the natural frequency sqrt(k/m).
double w = 1.0 *pow(k/m,0.5);
double f(double t );
double tf=45.0; // total duration of the quake (seconds)
int n=100;      // integration steps per simulation call
void leap_frog(double *t, double *dt, double *unuevo1, double *unuevo2, double *unuevo3);
void caso1(double *t, double *dt, double *unuevo1, double *unuevo2, double *unuevo3, double w);
void caso2(double *t, double *dt, double *unuevo1, double *unuevo2, double *unuevo3, double w);
void caso3(double *t, double *dt, double *unuevo1, double *unuevo2, double *unuevo3, double w);
void caso4(double *t, double *dt, double *unuevo1, double *unuevo2, double *unuevo3, double w);
// Sinusoidal ground forcing at the global frequency w.
double f(double t)
{
	return sin(w*t);
}
void leap_frog(double *t, double *dt, double *unuevo1, double *unuevo2, double *unuevo3)
{
	// Integrates the 3-storey building with a leapfrog scheme for n steps,
	// writing "u1 u2 u3 t" to edificio.dat, then runs a frequency sweep
	// writing the peak displacement of each storey per driving frequency
	// to omegasyumax.dat.
	// Fix: the u/v state variables were read before ever being written
	// (undefined behaviour) — the building now starts at rest.
	double uviejo1 = 0.0, uviejo2 = 0.0, uviejo3 = 0.0;
	double vviejo1 = 0.0, vviejo2 = 0.0, vviejo3 = 0.0;
	double vnuevo1 = 0.0, vnuevo2 = 0.0, vnuevo3 = 0.0;
	ofstream myfile;
	myfile.open("edificio.dat");
	for(int i=1; i<=n; i++)
	{
		// Half-step the velocities back, then full-step them forward.
		vviejo1=vviejo1-((*dt)/2)*((-gama*vviejo1-2*k*uviejo1+k*uviejo2+sin(w*(*t)))/m);
		vviejo2=vviejo2-((*dt)/2)*((-gama*vviejo2+k*uviejo1-2*k*uviejo2+k*uviejo3)/m);
		vviejo3=vviejo3-((*dt)/2)*((-gama*vviejo3+k*uviejo2-k*uviejo3)/m);
		vnuevo1=vviejo1+(*dt)*((-gama*vviejo1-2*k*uviejo1+k*uviejo2+sin(w*(*t)))/m);
		vnuevo2=vviejo2+(*dt)*((-gama*vviejo2+k*uviejo1-2*k*uviejo2+k*uviejo3)/m);
		vnuevo3=vviejo3+(*dt)*((-gama*vviejo3+k*uviejo2-k*uviejo3)/m);
		*unuevo1=uviejo1+vnuevo1*(*dt);
		*unuevo2=uviejo2+vnuevo2*(*dt);
		*unuevo3=uviejo3+vnuevo3*(*dt);
		// Fix: carry positions forward; previously only velocities advanced,
		// so the displacements never integrated.
		uviejo1=*unuevo1;
		uviejo2=*unuevo2;
		uviejo3=*unuevo3;
		vviejo1=vnuevo1;
		vviejo2=vnuevo2;
		vviejo3=vnuevo3;
		*t=(*t)+(*dt);
		myfile<<*unuevo1<<" "<< *unuevo2<<" "<<*unuevo3<<" "<<*t << " " <<std::endl;
	}
	myfile.close();
	// ---- Frequency sweep ---------------------------------------------
	// (The unused srand() seeding was removed; nothing here draws randoms.)
	ofstream myfile2;
	myfile2.open("omegasyumax.dat");
	double w1 = 0.2*pow(k/m,0.5);
	double w2 = 3.0*pow(k/m,0.5);
	int c=100;
	// Fix: "w2-w1/c-1" parsed as w2-(w1/c)-1; the intended step size is:
	double delta=(w2-w1)/(c-1);
	for (int i=0;i<c;i++)
	{
		double omega = w1+i*delta;  // Fix: was w1+delta for every i.
		// Restart from rest for each driving frequency and track the peak
		// displacement of every storey.  This replaces the previous
		// u_maxi[max] bookkeeping, whose index grew far past the array
		// bounds (max += j) and which streamed array pointers to the file.
		uviejo1 = uviejo2 = uviejo3 = 0.0;
		vviejo1 = vviejo2 = vviejo3 = 0.0;
		double tsweep = 0.0;
		double umax1 = 0.0, umax2 = 0.0, umax3 = 0.0;
		for(int j=0; j<10000; j++)
		{
			vviejo1=vviejo1-((*dt)/2)*((-gama*vviejo1-2*k*uviejo1+k*uviejo2+sin(omega*tsweep))/m);
			vviejo2=vviejo2-((*dt)/2)*((-gama*vviejo2+k*uviejo1-2*k*uviejo2+k*uviejo3)/m);
			vviejo3=vviejo3-((*dt)/2)*((-gama*vviejo3+k*uviejo2-k*uviejo3)/m);
			vnuevo1=vviejo1+(*dt)*((-gama*vviejo1-2*k*uviejo1+k*uviejo2+sin(omega*tsweep))/m);
			vnuevo2=vviejo2+(*dt)*((-gama*vviejo2+k*uviejo1-2*k*uviejo2+k*uviejo3)/m);
			vnuevo3=vviejo3+(*dt)*((-gama*vviejo3+k*uviejo2-k*uviejo3)/m);
			*unuevo1=uviejo1+vnuevo1*(*dt);
			*unuevo2=uviejo2+vnuevo2*(*dt);
			*unuevo3=uviejo3+vnuevo3*(*dt);
			if(*unuevo1>umax1) umax1=*unuevo1;
			if(*unuevo2>umax2) umax2=*unuevo2;
			if(*unuevo3>umax3) umax3=*unuevo3;
			uviejo1=*unuevo1; uviejo2=*unuevo2; uviejo3=*unuevo3;
			vviejo1=vnuevo1; vviejo2=vnuevo2; vviejo3=vnuevo3;
			tsweep += *dt;
		}
		// One line per frequency: peak displacements and the frequency.
		myfile2<<umax1<<" "<<umax2<<" "<<umax3<<" "<<omega<<std::endl;
	}
	myfile2.close();
}
void caso1(double *t, double *dt, double *unuevo1, double *unuevo2, double *unuevo3, double w)
{
	// Leapfrog integration of the 3-storey building for forcing frequency
	// w; writes "u1 u2 u3 t w" per step to caso1.dat.
	// Fix: the state variables were read before being written (undefined
	// behaviour) — the system now starts at rest.
	double uviejo1 = 0.0, uviejo2 = 0.0, uviejo3 = 0.0;
	double vviejo1 = 0.0, vviejo2 = 0.0, vviejo3 = 0.0;
	double vnuevo1 = 0.0, vnuevo2 = 0.0, vnuevo3 = 0.0;
	ofstream myfilec1;
	myfilec1.open("caso1.dat");
	for(int i=1; i<=n; i++)
	{
		vviejo1=vviejo1-((*dt)/2)*((-gama*vviejo1-2*k*uviejo1+k*uviejo2+sin(w*(*t)))/m);
		vviejo2=vviejo2-((*dt)/2)*((-gama*vviejo2+k*uviejo1-2*k*uviejo2+k*uviejo3)/m);
		vviejo3=vviejo3-((*dt)/2)*((-gama*vviejo3+k*uviejo2-k*uviejo3)/m);
		vnuevo1=vviejo1+(*dt)*((-gama*vviejo1-2*k*uviejo1+k*uviejo2+sin(w*(*t)))/m);
		vnuevo2=vviejo2+(*dt)*((-gama*vviejo2+k*uviejo1-2*k*uviejo2+k*uviejo3)/m);
		vnuevo3=vviejo3+(*dt)*((-gama*vviejo3+k*uviejo2-k*uviejo3)/m);
		*unuevo1=uviejo1+vnuevo1*(*dt);
		*unuevo2=uviejo2+vnuevo2*(*dt);
		*unuevo3=uviejo3+vnuevo3*(*dt);
		// Fix: carry the new positions forward; previously only the
		// velocities advanced, so the displacements never integrated.
		uviejo1=*unuevo1;
		uviejo2=*unuevo2;
		uviejo3=*unuevo3;
		vviejo1=vnuevo1;
		vviejo2=vnuevo2;
		vviejo3=vnuevo3;
		*t=(*t)+(*dt);
		myfilec1<<*unuevo1<<" "<< *unuevo2<<" "<<*unuevo3<<" "<<*t << " " <<w<<std::endl;
	}
	myfilec1.close();
}
void caso2(double *t, double *dt, double *unuevo1, double *unuevo2, double *unuevo3, double w)
{
	// Leapfrog integration of the 3-storey building for forcing frequency
	// w; writes "u1 u2 u3 t w" per step to caso2.dat.
	// Fix: the state variables were read before being written (undefined
	// behaviour) — the system now starts at rest.
	double uviejo1 = 0.0, uviejo2 = 0.0, uviejo3 = 0.0;
	double vviejo1 = 0.0, vviejo2 = 0.0, vviejo3 = 0.0;
	double vnuevo1 = 0.0, vnuevo2 = 0.0, vnuevo3 = 0.0;
	ofstream myfilec2;
	myfilec2.open("caso2.dat");
	for(int i=1; i<=n; i++)
	{
		vviejo1=vviejo1-((*dt)/2)*((-gama*vviejo1-2*k*uviejo1+k*uviejo2+sin(w*(*t)))/m);
		vviejo2=vviejo2-((*dt)/2)*((-gama*vviejo2+k*uviejo1-2*k*uviejo2+k*uviejo3)/m);
		vviejo3=vviejo3-((*dt)/2)*((-gama*vviejo3+k*uviejo2-k*uviejo3)/m);
		vnuevo1=vviejo1+(*dt)*((-gama*vviejo1-2*k*uviejo1+k*uviejo2+sin(w*(*t)))/m);
		vnuevo2=vviejo2+(*dt)*((-gama*vviejo2+k*uviejo1-2*k*uviejo2+k*uviejo3)/m);
		vnuevo3=vviejo3+(*dt)*((-gama*vviejo3+k*uviejo2-k*uviejo3)/m);
		*unuevo1=uviejo1+vnuevo1*(*dt);
		*unuevo2=uviejo2+vnuevo2*(*dt);
		*unuevo3=uviejo3+vnuevo3*(*dt);
		// Fix: carry the new positions forward; previously only the
		// velocities advanced, so the displacements never integrated.
		uviejo1=*unuevo1;
		uviejo2=*unuevo2;
		uviejo3=*unuevo3;
		vviejo1=vnuevo1;
		vviejo2=vnuevo2;
		vviejo3=vnuevo3;
		*t=(*t)+(*dt);
		myfilec2<<*unuevo1<<" "<< *unuevo2<<" "<<*unuevo3<<" "<<*t << " " <<w<<std::endl;
	}
	myfilec2.close();
}
void caso3(double *t, double *dt, double *unuevo1, double *unuevo2, double *unuevo3, double w)
{
	// Leapfrog integration of the 3-storey building for forcing frequency
	// w; writes "u1 u2 u3 t w" per step to caso3.dat.
	// Fix: the state variables were read before being written (undefined
	// behaviour) — the system now starts at rest.
	double uviejo1 = 0.0, uviejo2 = 0.0, uviejo3 = 0.0;
	double vviejo1 = 0.0, vviejo2 = 0.0, vviejo3 = 0.0;
	double vnuevo1 = 0.0, vnuevo2 = 0.0, vnuevo3 = 0.0;
	ofstream myfilec3;
	myfilec3.open("caso3.dat");
	for(int i=1; i<=n; i++)
	{
		vviejo1=vviejo1-((*dt)/2)*((-gama*vviejo1-2*k*uviejo1+k*uviejo2+sin(w*(*t)))/m);
		vviejo2=vviejo2-((*dt)/2)*((-gama*vviejo2+k*uviejo1-2*k*uviejo2+k*uviejo3)/m);
		vviejo3=vviejo3-((*dt)/2)*((-gama*vviejo3+k*uviejo2-k*uviejo3)/m);
		vnuevo1=vviejo1+(*dt)*((-gama*vviejo1-2*k*uviejo1+k*uviejo2+sin(w*(*t)))/m);
		vnuevo2=vviejo2+(*dt)*((-gama*vviejo2+k*uviejo1-2*k*uviejo2+k*uviejo3)/m);
		vnuevo3=vviejo3+(*dt)*((-gama*vviejo3+k*uviejo2-k*uviejo3)/m);
		*unuevo1=uviejo1+vnuevo1*(*dt);
		*unuevo2=uviejo2+vnuevo2*(*dt);
		*unuevo3=uviejo3+vnuevo3*(*dt);
		// Fix: carry the new positions forward; previously only the
		// velocities advanced, so the displacements never integrated.
		uviejo1=*unuevo1;
		uviejo2=*unuevo2;
		uviejo3=*unuevo3;
		vviejo1=vnuevo1;
		vviejo2=vnuevo2;
		vviejo3=vnuevo3;
		*t=(*t)+(*dt);
		myfilec3<<*unuevo1<<" "<< *unuevo2<<" "<<*unuevo3<<" "<<*t << " " <<w<<std::endl;
	}
	myfilec3.close();
}
void caso4(double *t, double *dt, double *unuevo1, double *unuevo2, double *unuevo3, double w)
{
	// Leapfrog integration of the 3-storey building for forcing frequency
	// w; writes "u1 u2 u3 t w" per step to caso4.dat.
	// Fix: the state variables were read before being written (undefined
	// behaviour) — the system now starts at rest.
	double uviejo1 = 0.0, uviejo2 = 0.0, uviejo3 = 0.0;
	double vviejo1 = 0.0, vviejo2 = 0.0, vviejo3 = 0.0;
	double vnuevo1 = 0.0, vnuevo2 = 0.0, vnuevo3 = 0.0;
	ofstream myfilec4;
	myfilec4.open("caso4.dat");
	for(int i=1; i<=n; i++)
	{
		vviejo1=vviejo1-((*dt)/2)*((-gama*vviejo1-2*k*uviejo1+k*uviejo2+sin(w*(*t)))/m);
		vviejo2=vviejo2-((*dt)/2)*((-gama*vviejo2+k*uviejo1-2*k*uviejo2+k*uviejo3)/m);
		vviejo3=vviejo3-((*dt)/2)*((-gama*vviejo3+k*uviejo2-k*uviejo3)/m);
		vnuevo1=vviejo1+(*dt)*((-gama*vviejo1-2*k*uviejo1+k*uviejo2+sin(w*(*t)))/m);
		vnuevo2=vviejo2+(*dt)*((-gama*vviejo2+k*uviejo1-2*k*uviejo2+k*uviejo3)/m);
		vnuevo3=vviejo3+(*dt)*((-gama*vviejo3+k*uviejo2-k*uviejo3)/m);
		*unuevo1=uviejo1+vnuevo1*(*dt);
		*unuevo2=uviejo2+vnuevo2*(*dt);
		*unuevo3=uviejo3+vnuevo3*(*dt);
		// Fix: carry the new positions forward; previously only the
		// velocities advanced, so the displacements never integrated.
		uviejo1=*unuevo1;
		uviejo2=*unuevo2;
		uviejo3=*unuevo3;
		vviejo1=vnuevo1;
		vviejo2=vnuevo2;
		vviejo3=vnuevo3;
		*t=(*t)+(*dt);
		myfilec4<<*unuevo1<<" "<< *unuevo2<<" "<<*unuevo3<<" "<<*t << " " <<w<<std::endl;
	}
	myfilec4.close();
}
int main()
{
	// Integration state plus the four driving frequencies, expressed as
	// multiples of the natural frequency sqrt(k/m).
	double t, dt,unuevo1,unuevo2,unuevo3, w1,w2,w3,w4;
	t= 0.0;
	dt= 0.1;
	unuevo1=0.0;
	unuevo2=0.0;
	unuevo3=0.0;
	w1= 0.5*pow(k/m,0.5);
	w2= 1.0*pow(k/m,0.5);
	w3= 1.5*pow(k/m,0.5);
	w4= 2.5*pow(k/m,0.5);
	//v = 0.0;
	// Each call advances the shared t by n*dt, so the loop keeps running
	// the five simulations until t reaches tf.
	// NOTE(review): t is shared across all cases, so later cases start at
	// a nonzero time and each output file is time-shifted — confirm that
	// this staggering is intended.
	while(t<tf)
	{//h = 0.01;
		//cout<<unuevo1<<" "<< unuevo2<< " " <<unuevo3<< " " <<t <<endl;
		leap_frog(&t, &dt, &unuevo1, &unuevo2, &unuevo3);
		caso1(&t, &dt, &unuevo1, &unuevo2, &unuevo3, w1);
		caso2(&t, &dt, &unuevo1, &unuevo2, &unuevo3, w2);
		caso3(&t, &dt, &unuevo1, &unuevo2, &unuevo3, w3);
		caso4(&t, &dt, &unuevo1, &unuevo2, &unuevo3, w4);
	}
	return 0;
}
| 84f54c5016bf987f68c50b15de327652d5f30e15 | [
"Python",
"Makefile",
"C++"
] | 4 | Python | MariaDelgado10/DelgadoMaria_hw2 | 6fda7eb569462be8efd49403f63c8fe4ceba4fc1 | e1fc614a04e50a2436620887f4b47308fcd154f1 |
refs/heads/main | <file_sep><?php
namespace Ramiz\Contact\Http\Controllers;
use App\Http\Controllers\Controller;
use Illuminate\Http\Request;
use Illuminate\Support\Facades\Mail;
use Ramiz\Contact\Mail\ContactMailable;
use Ramiz\Contact\Models\Contact;
class ContactController extends Controller
{
    /**
     * Render the public contact page.
     */
    public function getContactPage(){
        return view('contact::contact');
    }

    /**
     * Validate a contact submission, e-mail it and persist it.
     */
    public function sendMailContact(Request $request){
        // Fix: contact-form input is untrusted; validate before mailing and
        // before mass-assigning it into the Contact model.
        $request->validate([
            'name' => 'required|string|max:255',
            'email' => 'required|email',
            'message' => 'required|string',
        ]);
        Mail::to(config('contact.send_mail_to'))->send(new ContactMailable($request->message,$request->name));
        Contact::create($request->except('_token'));
        return redirect()->back();
    }
}
<file_sep># Contact Admin Package
The Contact Admin Package is a simple Laravel package that provides functionality to manage contact messages, including name, email, and message fields. It allows you to easily collect and manage contact information from users on your website.
## Features
- Capture and store contact messages from users.
- Easily configure receiver email address for contact messages.
- Seamless integration with Laravel's mail system.
## Installation
To get started with the Contact Admin Package, follow these steps:
1. **Install the Package**:
Install the package using Composer:
```bash
composer require ramiz/contact
```
2. **Set Mail Credentials**:
Add your mail credentials to your `.env` file to ensure the package can send email notifications:
```env
MAIL_MAILER=smtp
MAIL_HOST=smtp.example.com
MAIL_PORT=587
MAIL_USERNAME=your_username
MAIL_PASSWORD=<PASSWORD>
MAIL_ENCRYPTION=tls
```
3. **Configure Receiver Email**:
Set the receiver email address for contact messages in the published `config/contact.php` file in your Laravel project's `config` directory (the `send_mail_to` key).
4. **Publish Configuration**:
Publish the package's configuration files:
```env
php artisan vendor:publish --provider="Ramiz\Contact\ContactServiceProvider"
```
5. **Run Migrations**:
Run the database migrations to create the necessary tables:
```env
php artisan migrate
```
## Usage
Once the package is installed and configured, you can easily manage contact messages through the provided routes and views. Access the contact admin panel by navigating to `/contact` in your browser.
## License
This package is open-source software licensed under the [MIT License](LICENSE).
<file_sep><?php
return [
    // Destination address for contact-form notification e-mails;
    // read via config('contact.send_mail_to').
    'send_mail_to' => '<EMAIL>',
];
<file_sep><?php
use Illuminate\Http\Request;
use Illuminate\Support\Facades\Route;
use Ramiz\Contact\Http\Controllers\ContactController;
// Public contact page (GET) and the endpoint that mails + stores a message (POST).
Route::get('contact',[ContactController::class,'getContactPage'])->name('contact');
Route::post('contactme',[ContactController::class,'sendMailContact'])->name('contactMail');
"Markdown",
"PHP"
] | 4 | PHP | Ramiz8Work/contactus-package | c349ac8d236af1e2e203812be9083d1d9a38e42b | 94cd3fc6774473f46a0d359c09d94900facb8f42 |
refs/heads/master | <repo_name>suil5044/SelfDrivingRobot<file_sep>/ultrasonicsensor/distance_sensor22.py
#!/usr/bin/python
# Python 2 script: polls three HC-SR04 ultrasonic sensors on a Raspberry Pi
# one after another and prints each measured distance in an endless loop.
import RPi.GPIO as GPIO
import time
import cv2  # NOTE(review): imported but never used in this script
try:
    GPIO.setmode(GPIO.BOARD)  # physical header pin numbering
    # Trigger/echo pin pairs for the three sensors.
    PIN_TRIGGER1 = 7
    PIN_ECHO1 = 11
    PIN_TRIGGER2 = 13
    PIN_ECHO2 = 15
    PIN_TRIGGER3 = 16
    PIN_ECHO3 = 18
    GPIO.setup(PIN_TRIGGER1, GPIO.OUT)
    GPIO.setup(PIN_ECHO1, GPIO.IN)
    GPIO.setup(PIN_TRIGGER2, GPIO.OUT)
    GPIO.setup(PIN_ECHO2, GPIO.IN)
    GPIO.setup(PIN_TRIGGER3, GPIO.OUT)
    GPIO.setup(PIN_ECHO3, GPIO.IN)
    while True:
        # ---- Sensor 1 ----
        GPIO.output(PIN_TRIGGER1, GPIO.LOW)
        print "Waiting for sensor to settle"
        time.sleep(0.5)
        print "Calculating distance"
        # 10 us trigger pulse starts one measurement.
        GPIO.output(PIN_TRIGGER1, GPIO.HIGH)
        time.sleep(0.00001)
        GPIO.output(PIN_TRIGGER1, GPIO.LOW)
        # NOTE(review): if the echo line is already high (or never goes
        # high) these busy-wait loops can leave pulse_start_time /
        # pulse_end_time unassigned, raising NameError — confirm on hardware.
        while GPIO.input(PIN_ECHO1)==0:
            pulse_start_time = time.time()
        while GPIO.input(PIN_ECHO1) ==1:
            pulse_end_time = time.time()
        pulse_duration = pulse_end_time - pulse_start_time
        # 17150 = speed of sound in cm/s (34300) halved for the round trip.
        distance = round(pulse_duration * 17150, 2)
        print "Distance1:", distance,"cm"
        # ---- Sensor 2 ----
        GPIO.output(PIN_TRIGGER2, GPIO.LOW)
        time.sleep(0.5)
        GPIO.output(PIN_TRIGGER2, GPIO.HIGH)
        time.sleep(0.00001)
        GPIO.output(PIN_TRIGGER2, GPIO.LOW)
        while GPIO.input(PIN_ECHO2)==0:
            pulse_start_time2 = time.time()
        while GPIO.input(PIN_ECHO2) ==1:
            pulse_end_time2 = time.time()
        pulse_duration = pulse_end_time2 - pulse_start_time2
        distance = round(pulse_duration * 17150, 2)
        print "Distance2:", distance,"cm"
        # ---- Sensor 3 ----
        GPIO.output(PIN_TRIGGER3, GPIO.LOW)
        time.sleep(0.5)
        GPIO.output(PIN_TRIGGER3, GPIO.HIGH)
        time.sleep(0.00001)
        GPIO.output(PIN_TRIGGER3, GPIO.LOW)
        while GPIO.input(PIN_ECHO3)==0:
            pulse_start_time3 = time.time()
        while GPIO.input(PIN_ECHO3) ==1:
            pulse_end_time3 = time.time()
        pulse_duration = pulse_end_time3 - pulse_start_time3
        distance = round(pulse_duration * 17150, 2)
        print "Distance3:", distance,"cm\n"
except KeyboardInterrupt:
    print('interuptted!')
finally:
    # Always release the GPIO pins, even on Ctrl-C.
    GPIO.cleanup()
    print('cleaned')
<file_sep>/ultrasonicsensor/distance_sensor.py
#!/usr/bin/python
import RPi.GPIO as GPIO
import time
import cv2
try:
GPIO.setmode(GPIO.BOARD)
PIN_TRIGGER = 7
PIN_ECHO = 11
GPIO.setup(PIN_TRIGGER, GPIO.OUT)
GPIO.setup(PIN_ECHO, GPIO.IN)
while True:
GPIO.output(PIN_TRIGGER, GPIO.LOW)
print "Waiting for sensor to settle"
time.sleep(1)
print "Calculating distance"
GPIO.output(PIN_TRIGGER, GPIO.HIGH)
time.sleep(0.00001)
GPIO.output(PIN_TRIGGER, GPIO.LOW)
while GPIO.input(PIN_ECHO)==0:
pulse_start_time = time.time()
while GPIO.input(PIN_ECHO) ==1:
pulse_end_time = time.time()
pulse_duration = pulse_end_time - pulse_start_time
distance = round(pulse_duration * 17150, 2)
print "Distance:", distance,"cm"
except KeyboardInterrupt:
print('interuptted!')
finally:
GPIO.cleanup()
print('cleaned')
<file_sep>/ultrasonicsensor/distance_sensor2.py
#!/usr/bin/python
import RPi.GPIO as GPIO
import time
import cv2
try:
GPIO.setmode(GPIO.BOARD)
PIN_TRIGGER1 = 7
PIN_ECHO1 = 11
GPIO.setup(PIN_TRIGGER1, GPIO.OUT)
GPIO.setup(PIN_ECHO1, GPIO.IN)
PIN_TRIGGER2 = 13
PIN_ECHO2 = 15
GPIO.setup(PIN_TRIGGER2, GPIO.OUT)
GPIO.setup(PIN_ECHO2, GPIO.IN)
while True:
GPIO.output(PIN_TRIGGER1, GPIO.LOW)
GPIO.output(PIN_TRIGGER2, GPIO.LOW)
print "Waiting for sensor to settle"
time.sleep(1)
print "Calculating distance"
GPIO.output(PIN_TRIGGER1, GPIO.HIGH)
GPIO.output(PIN_TRIGGER2, GPIO.HIGH)
time.sleep(0.00001)
GPIO.output(PIN_TRIGGER1, GPIO.LOW)
while GPIO.input(PIN_ECHO1)==0 and GPIO.input(PIN_ECHO2) ==0:
pulse_start_time1 = time.time()
pulse_start_time2 = time.time()
while GPIO.input(PIN_ECHO1) ==1 and GPIO.input(PIN_ECHO2) ==0:
pulse_end_time1 = time.time()
pulse_end_time2 = time.time()
pulse_duration1 = pulse_end_time1 - pulse_start_time1
distance1 = round(pulse_duration1 * 17150, 2)
print "Distance1:", distance1,"cm"
pulse_duration2 = pulse_end_time2 - pulse_start_time2
distance2 = round(pulse_duration2 *17150, 2)
print "Distance2:", distance2,"cm"
except KeyboardInterrupt:
print('interuptted!')
finally:
GPIO.cleanup()
print('cleaned')
| a616e1385d8d223d69c9d5cae0bcf66324d9f260 | [
"Python"
] | 3 | Python | suil5044/SelfDrivingRobot | e6064f0c6a528906d5582e6f4550fe3e27b5bcd6 | 5bc8aa819acde097fb8620c18e9ae19ac10bad9f |
refs/heads/master | <repo_name>santeive/compara-shop<file_sep>/core/views.py
from django.views.generic.base import TemplateView
from django.shortcuts import render
import sqlite3
class HomePageView(TemplateView):
    """Landing page: aggregate catalogue statistics from the `linio` table."""

    def get(self, request, *args, **kwargs):
        """Render core/home.html with product/brand/category/seller counts."""
        with sqlite3.connect("stores.sqlite3") as conn:
            cursor = conn.cursor()
            # One table scan instead of four separate COUNT queries.
            cursor.execute(
                'SELECT COUNT(id), COUNT(DISTINCT brand), '
                'COUNT(DISTINCT category), COUNT(DISTINCT seller) FROM linio'
            )
            products, brands, categories, seller = cursor.fetchone()
        context = {
            'categories': categories,
            'products': products,
            'brands': brands,
            'seller': seller,
        }
        return render(request, "core/home.html", context)
class SamplePageView(TemplateView):
    """Static sample page rendered from core/sample.html."""
    template_name = "core/sample.html"
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
import sqlite3
# Create your views here.
def claroshop(request):
    """Claroshop dashboard: catalogue stats plus the in-stock item count."""
    with sqlite3.connect("stores.sqlite3") as conn:
        cursor = conn.cursor()
        # One scan for the three catalogue aggregates instead of three.
        cursor.execute(
            'SELECT COUNT(id), COUNT(DISTINCT brand), '
            'COUNT(DISTINCT category) FROM claro'
        )
        products, brands, categories = cursor.fetchone()
        # Stock availability lives in the separate price table.
        cursor.execute(
            'SELECT COUNT(stock) FROM claropriceproducts WHERE stock > 0'
        )
        stock_av = cursor.fetchone()[0]
    context = {
        'categories': categories,
        'products': products,
        'brands': brands,
        'stock_av': stock_av,
    }
    return render(request, 'stores/claroshop.html', context)
def linio(request):
    """Linio dashboard: catalogue statistics from the `linio` table.

    Fix: this view previously queried the `claro` table (copy-paste from
    the claroshop view), so the Linio page showed Claroshop's numbers.
    HomePageView reads the same stats from `linio`.
    """
    with sqlite3.connect("stores.sqlite3") as conn:
        cursor = conn.cursor()
        # One scan for all three aggregates instead of three queries.
        cursor.execute(
            'SELECT COUNT(id), COUNT(DISTINCT brand), '
            'COUNT(DISTINCT category) FROM linio'
        )
        products, brands, categories = cursor.fetchone()
    context = {
        'categories': categories,
        'products': products,
        'brands': brands,
    }
    return render(request, 'stores/linio.html', context)
def walmart(request):
    """Placeholder Walmart store page; renders the static template."""
    context = {'titulo': 'Pagina base'}
    return render(request, 'stores/walmart.html', context)
from django.db import models
from django.contrib.auth.models import User
from django.dispatch import receiver
from django.db.models.signals import post_save
# Create your models here.
class Profile(models.Model):
    """Extra per-user data linked one-to-one with the auth User."""
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    # Uploaded under MEDIA_ROOT/profiles; optional.
    avatar = models.ImageField(upload_to = 'profiles', null=True, blank=True)
    bio = models.TextField(null=True, blank=True)
    link = models.URLField(max_length=200, null=True, blank=True)
# Signal: guarantee a Profile exists as soon as a User account is created.
@receiver(post_save, sender=User)
def ensure_profile_exists(sender, instance, created=False, **kwargs):
    """Create the linked Profile the first time a User is saved.

    post_save passes `created=True` only on the initial INSERT, so
    existing users are left untouched.
    """
    # Fix: take `created` as a named parameter instead of kwargs.get(),
    # and drop the leftover debug print that wrote to stdout on every
    # user creation in production.
    if created:
        Profile.objects.get_or_create(user=instance)
from django.urls import path
from .views import PageListView, PageDetailView, PageCreate, PageUpdate, PageDelete
# Namespaced URL patterns: a (patterns, app_namespace) tuple so the project
# urls.py can include them under the "pages" namespace, e.g.
# path('pages/', include(pages_patterns)).
pages_patterns = ([
    # Class-based views are wired in with .as_view().
    path('', PageListView.as_view(), name='pages'),
    path('<int:pk>/<slug:slug>/', PageDetailView.as_view(), name='page'),
    # CRUD endpoints for pages.
    path('create/', PageCreate.as_view(), name='create'),
    path('update/<int:pk>/', PageUpdate.as_view(), name='update'),
    path('delete/<int:pk>/', PageDelete.as_view(), name='delete'),
], 'pages')
from django.urls import path,include
from django.contrib.auth import views as auth_views
from . import views
app_name = "stores"  # URL namespace: reverse as "stores:<name>"
# One dashboard page per supported store.
urlpatterns = [
    path('claroshop/', views.claroshop, name="claroshop"),
    path('linio/', views.linio, name="linio"),
    path('walmart/', views.walmart, name="walmart"),
]
from django import forms
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import User
from .models import Profile
class UserCreationFormWithEmail(UserCreationForm):
    """Sign-up form that additionally requires a unique e-mail address."""

    email = forms.EmailField(required=True, help_text="Requerido, 245 caracteres como máximo y debe ser válido")

    # Redefine Meta to add the email field; overriding `widgets` here would
    # discard the validation/config UserCreationForm already provides.
    class Meta:
        model = User
        # Fix: the confirmation field is named "password2" in Django's
        # UserCreationForm; the "<PASSWORD>" placeholder broke the form.
        fields = ("username", "email", "password1", "password2")

    def clean_email(self):
        """Reject registration when the e-mail address is already taken."""
        email = self.cleaned_data.get("email")
        if User.objects.filter(email=email).exists():
            raise forms.ValidationError("El email ya está registrado, prueba con otro")
        return email
class ProfileForm(forms.ModelForm):
    """Edit form for the user's Profile (avatar, bio, link)."""

    class Meta:
        model = Profile
        fields = ['avatar', 'bio', 'link']
        # Bootstrap-styled widgets for each field.
        widgets = {
            'avatar': forms.ClearableFileInput(attrs={'class':'form-control-file mt-3'}),
            'bio': forms.Textarea(attrs={'class':'form-control mt-3', 'rows':3, 'placeholder':'Biografía'}),
            'link': forms.URLInput(attrs={'class':'form-control mt-3', 'placeholder':'Enlace'}),
        }
class EmailForm(forms.ModelForm):
    """Account form for changing the e-mail address, enforcing uniqueness."""

    email = forms.EmailField(required=True, help_text="Requerido, 245 caracteres como máximo y debe ser válido")

    class Meta:
        model = User
        fields = ['email']

    def clean_email(self):
        """Validate that a *changed* e-mail is not already registered."""
        email = self.cleaned_data.get("email")
        # `changed_data` lists the fields edited in this submission; only a
        # modified address needs the uniqueness check.
        email_was_edited = 'email' in self.changed_data
        if email_was_edited and User.objects.filter(email=email).exists():
            raise forms.ValidationError("El email ya está registrado, prueba con otro")
        return email
"Python"
] | 6 | Python | santeive/compara-shop | 7296b0c2fed7e5bad0275e616b57ab1be911047b | c39ac78ab404db6b351dff8cfbf631a65648700a |
refs/heads/master | <file_sep><?php
namespace App;
use Illuminate\Database\Eloquent\Model;
use Illuminate\Database\Eloquent\Relations\HasMany;
/**
* Class StepsMap
* @package App
*/
class StepsMap extends Model
{
    // Workflow-step identifiers — presumably the primary keys of the
    // steps_map table rows; confirm against the seed data.
    public const ACOES_DE_VALIDACAO = 19;
    public const ACOES_DE_VERIFICACAO = 12;
    public const ANALISE_DO_FORNECEDOR = 3;
    public const ATUALIZACOES_E_CORRECOES_ECR = 7;
    public const ATUALIZACOES_E_CORRECOES_EDR = 14;
    public const ATUALIZACOES_E_CORRECOES_QER = 21;
    public const DESENVOLVIMENTO_DA_FERRAMENTA = 9;
    public const ESPECIFICACOES_PRELIMINARES = 1;
    public const ITEM_LIBERADO_PARA_PRODUCAO = 22;
    public const LOTE_1 = 5;
    public const LOTE_2 = 11;
    public const LOTE_3 = 18;
    public const PLANO_DE_CONTROLE_DE_MANUFATURA = 17;
    public const PLANO_DE_CONTROLE_DE_PROTOTIPO = 2;
    public const PLANO_DE_CONTROLE_DE_QUALIDADE = 8;
    public const QUALIFICACAO_DO_FORNECEDOR = 15;
    public const RESULTADOS_ACEITAVEIS_ECR = 6;
    public const RESULTADOS_ACEITAVEIS_EDR = 13;
    public const RESULTADOS_ACEITAVEIS_QER = 20;
    public const SOLICITACAO_DO_LOTE_1 = 4;
    public const SOLICITACAO_DO_LOTE_2 = 10;
    public const SOLICITACAO_DO_LOTE_3 = 16;

    /**
     * Mass-assignable attributes.
     *
     * @var string[]
     */
    protected $fillable = [
        'name',
        'ui_id',
        'type',
        'phase'
    ];

    /**
     * States mapped to this step.
     *
     * @return HasMany
     */
    public function mappedStates(): HasMany
    {
        return $this->hasMany(StepStatesMap::class, 'step_id');
    }
}
<file_sep><?php
namespace App;
use Illuminate\Database\Eloquent\Model;
use Illuminate\Database\Eloquent\Relations\BelongsTo;
use Illuminate\Database\Eloquent\Relations\HasMany;
use Illuminate\Database\Eloquent\SoftDeletes;
/**
* Class Item
* @package App
*/
class Item extends Model
{
    use SoftDeletes;

    /**
     * Mass-assignable attributes.
     *
     * @var string[]
     */
    protected $fillable = [
        'code',
        'description',
        'status_id',
        'project_id',
        'nature_id',
        'type_id'
    ];

    /**
     * Project this item belongs to.
     *
     * @return BelongsTo
     */
    public function project(): BelongsTo
    {
        return $this->belongsTo(Project::class, 'project_id');
    }

    /**
     * Current status of the item.
     *
     * @return BelongsTo
     */
    public function status(): BelongsTo
    {
        return $this->belongsTo(Status::class, 'status_id');
    }

    /**
     * All workflow steps recorded for this item.
     *
     * @return HasMany
     */
    public function steps(): HasMany
    {
        return $this->hasMany(Step::class);
    }

    /**
     * The item's step matching the given step map id, if any.
     *
     * @param mixed $step_map_id
     * @return Model|object|null
     */
    public function step($step_map_id)
    {
        return $this->steps()->where('step_map_id', $step_map_id)->first();
    }

    /**
     * Nature of the item.
     *
     * Fix: declared return types on nature()/type() for consistency with
     * the other relationship accessors above.
     *
     * @return BelongsTo
     */
    public function nature(): BelongsTo
    {
        return $this->belongsTo(Nature::class, 'nature_id');
    }

    /**
     * Type of the item.
     *
     * @return BelongsTo
     */
    public function type(): BelongsTo
    {
        return $this->belongsTo(Type::class, 'type_id');
    }
}
<file_sep>function redirectTo(locale) {
window.location = locale;
}
<file_sep><?php
namespace App\Http\Controllers;
use App\Project;
use App\Services\ProjetcsService;
use Exception;
use Illuminate\Contracts\Foundation\Application;
use Illuminate\Contracts\View\Factory;
use Illuminate\Http\RedirectResponse;
use Illuminate\Http\Request;
use Illuminate\Http\Response;
use Illuminate\Support\Facades\Auth;
use Illuminate\View\View;
/**
* Class ProjectsController
* @package App\Http\Controllers
*/
class ProjectsController extends Controller
{
    /**
     * Validation rules shared by store() and update().
     * Fix: the rule array was duplicated in both methods.
     */
    private const VALIDATION_RULES = [
        'code' => 'required',
        'name' => 'required'
    ];

    /**
     * @var ProjetcsService
     */
    private ProjetcsService $projectsService;

    /**
     * ProjectsController constructor.
     * @param ProjetcsService $projectsService
     */
    public function __construct(ProjetcsService $projectsService)
    {
        $this->projectsService = $projectsService;
    }

    /**
     * Display a listing of the resource.
     *
     * @return Application|Factory|View
     */
    public function index()
    {
        $projects = $this->projectsService->all();
        return view('projects.index', compact('projects'));
    }

    /**
     * Show the form for creating a new resource.
     *
     * @return Application|Factory|View
     */
    public function create()
    {
        return view('projects.form');
    }

    /**
     * Store a newly created resource in storage.
     *
     * @param Request $request
     * @return RedirectResponse
     */
    public function store(Request $request)
    {
        $request->validate(self::VALIDATION_RULES);
        $data = $request->except([
            '_token'
        ]);
        // The authenticated user owns the new project.
        $data['user_id'] = Auth::id();
        $this->projectsService->create($data);
        return redirect()->back()->with('success', 'Projeto criado com sucesso');
    }

    /**
     * Display the specified resource.
     *
     * @param Project $projects
     * @return Response
     */
    public function show(Project $projects)
    {
        //
    }

    /**
     * Show the form for editing the specified resource.
     *
     * @param Project $project
     * @return Application|Factory|View
     */
    public function edit(Project $project)
    {
        return view('projects.form', compact('project'));
    }

    /**
     * Update the specified resource in storage.
     *
     * @param Request $request
     * @param Project $project
     * @return RedirectResponse
     */
    public function update(Request $request, Project $project)
    {
        $request->validate(self::VALIDATION_RULES);
        $this->projectsService->update($project, $request->except([
            '_token'
        ]));
        return redirect()->back()->with('success', 'Projeto editado com sucesso');
    }

    /**
     * Remove the specified resource from storage.
     *
     * @param Project $project
     * @return RedirectResponse
     * @throws Exception
     */
    public function destroy(Project $project)
    {
        $this->projectsService->delete($project);
        return redirect()->back()->with('success', 'Projeto deletado com sucesso');
    }
}
<file_sep><?php
namespace App\Repositories;
use App\Project;
use Illuminate\Database\Eloquent\Model;
/**
 * Eloquent-backed repository for Project CRUD operations.
 */
class ProjectsRepository implements RepositoryInterface
{
    /**
     * Fetch every project.
     *
     * @return \Illuminate\Database\Eloquent\Collection|Project[]
     */
    public function all()
    {
        return Project::all();
    }

    /**
     * Persist a new project.
     *
     * @param array $data mass-assignable attributes (see Project::$fillable)
     * @return Project
     */
    public function create(array $data)
    {
        return Project::create($data);
    }

    /**
     * Update the given project in place.
     *
     * Fix: the previous implementation re-fetched the model via
     * Project::find($resource->id) before updating, which issued a redundant
     * query, null-dereferenced if the row had just been (soft-)deleted, and
     * returned nothing. Updating the instance we already hold matches
     * ItemsRepository::update() and returns the boolean result.
     *
     * @param Model $resource
     * @param array $data
     * @return bool
     */
    public function update(Model $resource, array $data)
    {
        return $resource->update($data);
    }

    /**
     * Delete the given project (soft delete — see the Project model).
     *
     * @param Model $resource
     * @return bool|null
     * @throws \Exception
     */
    public function delete(Model $resource)
    {
        return $resource->delete();
    }
}
<file_sep><?php
namespace App;
use Illuminate\Database\Eloquent\Model;
// Template ("map") describing a state slot: its name, widget type and
// whether it is mandatory. Concrete per-step values live in State.
class StatesMap extends Model
{
    // Widget types used by the UI when rendering a state field.
    public const FILE = 'F';
    public const TEXT = 'T';
    public const DATE = 'D';

    // Direction of a state relative to a step (stored in steps_states_map.type).
    public const INPUT = 'I';
    public const OUTPUT = 'O';

    // Primary keys of the seeded state templates; referenced by
    // StepsStatesMapSeeder, so they must match the seeding order.
    public const BILL_OF_MATERIAL_BOM = 1;
    public const ESTRUTURA_FUNCIONAL_DO_PRODUTO = 2;
    public const ESPECIFICACOES_PRELIMINARES = 3;
    public const ETAPAS_DO_PROCESSO = 4;
    public const OUTROS_ARQUIVOS = 5;
    public const PLANO_DE_CONTROLE_DE_PROTOTIPO = 6;
    public const HISTORICO_DE_RISCO_DO_FORNECEDOR = 7;
    public const RETORNO_DO_FORNECEDOR = 8;
    public const SOLICITACAO_COM_O_FORNECEDOR = 9;
    public const PRAZO_DE_ENTREGA_DA_SOLICITACAO = 10;
    public const RESPONSAVEL_DA_ENGENHARIA = 11;
    public const RESPONSAVEL_DA_QUALIDADE = 12;
    // NOTE(review): non-ASCII constant name (Á); kept because other files
    // (e.g. StepsStatesMapSeeder) reference it by this exact spelling.
    public const RESPONSAVEL_DA_FÁBRICA = 13;
    public const RELATORIO_DE_RESULTADOS_DO_LOTE_1 = 14;
    public const ATA_DE_REUNIAO_DE_CHECKPOINT = 15;
    public const PLANO_DE_ACOES_DE_CORRECAO = 16;
    public const DOCUMENTOS_ADICIONAIS = 17;
    public const PLANO_DE_CONTROLE_DE_QUALIDADE = 18;
    public const RELATORIO_DE_DESENVOLVIMENTO_DO_FORNECEDOR = 19;
    public const RELATORIO_DE_RESULTADOS_DO_LOTE_2 = 20;
    public const RELATORIO_DOS_RESULTADOS_DE_VERIFICACAO_2 = 21;
    public const CONTRATO_DE_SERVICOS_PRESTADOS = 22;
    public const RELATORIO_DE_QUALIFICACAO_DO_FORNECEDOR = 23;
    public const PLANO_DE_CONTROLE_DE_MANUFATURA = 24;
    public const RELATORIO_DE_RESULTADOS_DO_LOTE_3 = 25;
    public const RELATORIO_DE_RESULTADOS_DE_VALIDACAO = 26;

    // Mass-assignable attributes.
    protected $fillable = [
        'name',
        'type',
        'is_mandatory'
    ];
}
<file_sep><?php
namespace App\Services;
use App\Item;
use App\PreviousSteps;
use App\Repositories\ItemsRepository;
use App\Step;
use App\StepsMap;
use Exception;
use Illuminate\Database\Eloquent\Model;
/**
 * Class ItemsService
 *
 * Orchestrates item lifecycle: a freshly created item receives one Step per
 * step template plus its workflow dependency graph.
 *
 * @package App\Services
 */
class ItemsService implements ServiceInterface
{
    /** @var ItemsRepository */
    private ItemsRepository $itemsRepository;

    /** @var Stepservice */
    private Stepservice $stepService;

    /**
     * ItemsService constructor.
     *
     * @param ItemsRepository $itemsRepository
     * @param Stepservice $stepService
     */
    public function __construct(ItemsRepository $itemsRepository, Stepservice $stepService)
    {
        $this->itemsRepository = $itemsRepository;
        $this->stepService = $stepService;
    }

    /**
     * Every registered item.
     */
    public function all()
    {
        return $this->itemsRepository->all();
    }

    /**
     * Create an item together with its full set of workflow steps.
     *
     * @param array $data
     * @return Item
     */
    public function create(array $data): Item
    {
        /* @var Item $item */
        $item = $this->itemsRepository->create($data);

        // One unchecked step per step template.
        foreach (StepsMap::all() as $stepMap) {
            $this->stepService->create([
                'item_id' => $item->id,
                'step_map_id' => $stepMap->id,
                'status' => Step::UNCHECKED
            ]);
        }

        // Wire up predecessor relations between the freshly created steps.
        $this->stepService->setupWorkflow($item);

        return $item;
    }

    /**
     * Apply $data to an existing item.
     *
     * @param Model $resource
     * @param array $data
     * @return mixed
     */
    public function update(Model $resource, $data)
    {
        return $this->itemsRepository->update($resource, $data);
    }

    /**
     * Delete the given item.
     *
     * @param Model $model
     * @return bool|null
     * @throws Exception
     */
    public function delete(Model $model): ?bool
    {
        return $this->itemsRepository->delete($model);
    }
}
<file_sep><?php
namespace App;
use Illuminate\Database\Eloquent\Model;
use Illuminate\Database\Eloquent\Relations\BelongsTo;
use Illuminate\Database\Eloquent\Relations\HasMany;
/**
 * Class State
 *
 * A concrete input/output slot of a step instance: stores the submitted
 * value and owns any files uploaded against it.
 *
 * @package App
 */
class State extends Model
{
    /**
     * @var string[] mass-assignable attributes
     */
    protected $fillable = [
        'step_id',
        'state_map_id',
        'value'
    ];
    /**
     * Step instance this state belongs to.
     *
     * @return BelongsTo
     */
    public function step(): BelongsTo
    {
        return $this->belongsTo(Step::class, 'step_id');
    }
    /**
     * Template (StatesMap) describing this state's name/type/flags.
     *
     * @return BelongsTo
     */
    public function stateInformation(): BelongsTo
    {
        return $this->belongsTo(StatesMap::class, 'state_map_id');
    }
    /**
     * Query for the pivot row describing whether this state acts as an
     * input or output of the given step template. Returns an unexecuted
     * builder — callers (see Stepservice) chain ->first().
     *
     * @param $step_map_id
     * @return mixed
     */
    public function stateStepInformation($step_map_id)
    {
        return StepStatesMap::where([
            'step_id' => $step_map_id,
            'state_id' => $this->state_map_id
        ]);
    }
    /**
     * Files uploaded against this state.
     *
     * @return HasMany
     */
    public function files(): HasMany
    {
        return $this->hasMany(File::class, 'state_id');
    }
}
<file_sep><?php
use App\StatesMap;
use App\StepsMap;
use App\StepStatesMap;
use Illuminate\Database\Seeder;
/**
 * Class StepsStatesMapSeeder
 *
 * Seeds the steps_states_map pivot table: for every step template it lists
 * which state templates are consumed as inputs and which are produced as
 * outputs of that step.
 */
class StepsStatesMapSeeder extends Seeder
{
    /**
     * @var array[]
     * format: [
     *     step_id => [
     *         [
     *             state_id,
     *             type(Input or Output)
     *         ], ...
     *     ]
     * ]
     */
    private $map = [
        StepsMap::ESPECIFICACOES_PRELIMINARES => [
            ['state_id' => StatesMap::BILL_OF_MATERIAL_BOM, 'type' => StatesMap::INPUT],
            ['state_id' => StatesMap::ESTRUTURA_FUNCIONAL_DO_PRODUTO, 'type' => StatesMap::INPUT],
            ['state_id' => StatesMap::ESPECIFICACOES_PRELIMINARES, 'type' => StatesMap::OUTPUT],
            ['state_id' => StatesMap::ETAPAS_DO_PROCESSO, 'type' => StatesMap::OUTPUT],
            ['state_id' => StatesMap::OUTROS_ARQUIVOS, 'type' => StatesMap::OUTPUT],
        ],
        StepsMap::PLANO_DE_CONTROLE_DE_PROTOTIPO => [
            ['state_id' => StatesMap::BILL_OF_MATERIAL_BOM, 'type' => StatesMap::INPUT],
            ['state_id' => StatesMap::ETAPAS_DO_PROCESSO, 'type' => StatesMap::INPUT],
            ['state_id' => StatesMap::ESPECIFICACOES_PRELIMINARES, 'type' => StatesMap::INPUT],
            ['state_id' => StatesMap::PLANO_DE_CONTROLE_DE_PROTOTIPO, 'type' => StatesMap::OUTPUT],
        ],
        StepsMap::ANALISE_DO_FORNECEDOR => [
            ['state_id' => StatesMap::ETAPAS_DO_PROCESSO, 'type' => StatesMap::INPUT],
            ['state_id' => StatesMap::ESPECIFICACOES_PRELIMINARES, 'type' => StatesMap::INPUT],
            ['state_id' => StatesMap::HISTORICO_DE_RISCO_DO_FORNECEDOR, 'type' => StatesMap::OUTPUT],
            ['state_id' => StatesMap::RETORNO_DO_FORNECEDOR, 'type' => StatesMap::OUTPUT],
        ],
        StepsMap::SOLICITACAO_DO_LOTE_1 => [
            ['state_id' => StatesMap::PLANO_DE_CONTROLE_DE_PROTOTIPO, 'type' => StatesMap::INPUT],
            ['state_id' => StatesMap::SOLICITACAO_COM_O_FORNECEDOR, 'type' => StatesMap::OUTPUT],
            ['state_id' => StatesMap::PRAZO_DE_ENTREGA_DA_SOLICITACAO, 'type' => StatesMap::OUTPUT],
        ],
        StepsMap::LOTE_1 => [
            ['state_id' => StatesMap::PRAZO_DE_ENTREGA_DA_SOLICITACAO, 'type' => StatesMap::INPUT],
            ['state_id' => StatesMap::RESPONSAVEL_DA_ENGENHARIA, 'type' => StatesMap::OUTPUT],
            ['state_id' => StatesMap::RESPONSAVEL_DA_QUALIDADE, 'type' => StatesMap::OUTPUT],
            ['state_id' => StatesMap::RESPONSAVEL_DA_FÁBRICA, 'type' => StatesMap::OUTPUT],
            ['state_id' => StatesMap::RELATORIO_DE_RESULTADOS_DO_LOTE_1, 'type' => StatesMap::OUTPUT],
        ],
        StepsMap::RESULTADOS_ACEITAVEIS_ECR => [
            ['state_id' => StatesMap::RELATORIO_DE_RESULTADOS_DO_LOTE_1, 'type' => StatesMap::INPUT],
            ['state_id' => StatesMap::ATA_DE_REUNIAO_DE_CHECKPOINT, 'type' => StatesMap::OUTPUT],
        ],
        StepsMap::ATUALIZACOES_E_CORRECOES_ECR => [
            ['state_id' => StatesMap::RELATORIO_DE_RESULTADOS_DO_LOTE_1, 'type' => StatesMap::INPUT],
            ['state_id' => StatesMap::PLANO_DE_ACOES_DE_CORRECAO, 'type' => StatesMap::OUTPUT],
            ['state_id' => StatesMap::DOCUMENTOS_ADICIONAIS, 'type' => StatesMap::OUTPUT],
        ],
        StepsMap::PLANO_DE_CONTROLE_DE_QUALIDADE => [
            ['state_id' => StatesMap::ESPECIFICACOES_PRELIMINARES, 'type' => StatesMap::INPUT],
            ['state_id' => StatesMap::RELATORIO_DE_RESULTADOS_DO_LOTE_1, 'type' => StatesMap::INPUT],
            ['state_id' => StatesMap::PLANO_DE_CONTROLE_DE_QUALIDADE, 'type' => StatesMap::OUTPUT],
        ],
        StepsMap::DESENVOLVIMENTO_DA_FERRAMENTA => [
            ['state_id' => StatesMap::ESPECIFICACOES_PRELIMINARES, 'type' => StatesMap::INPUT],
            ['state_id' => StatesMap::RELATORIO_DE_RESULTADOS_DO_LOTE_1, 'type' => StatesMap::INPUT],
            ['state_id' => StatesMap::RELATORIO_DE_DESENVOLVIMENTO_DO_FORNECEDOR, 'type' => StatesMap::OUTPUT],
        ],
        StepsMap::SOLICITACAO_DO_LOTE_2 => [
            ['state_id' => StatesMap::PLANO_DE_CONTROLE_DE_QUALIDADE, 'type' => StatesMap::INPUT],
            ['state_id' => StatesMap::SOLICITACAO_COM_O_FORNECEDOR, 'type' => StatesMap::OUTPUT],
            ['state_id' => StatesMap::PRAZO_DE_ENTREGA_DA_SOLICITACAO, 'type' => StatesMap::OUTPUT],
        ],
        StepsMap::LOTE_2 => [
            ['state_id' => StatesMap::PRAZO_DE_ENTREGA_DA_SOLICITACAO, 'type' => StatesMap::INPUT],
            ['state_id' => StatesMap::RESPONSAVEL_DA_ENGENHARIA, 'type' => StatesMap::OUTPUT],
            ['state_id' => StatesMap::RESPONSAVEL_DA_QUALIDADE, 'type' => StatesMap::OUTPUT],
            ['state_id' => StatesMap::RESPONSAVEL_DA_FÁBRICA, 'type' => StatesMap::OUTPUT],
            ['state_id' => StatesMap::RELATORIO_DE_RESULTADOS_DO_LOTE_2, 'type' => StatesMap::OUTPUT],
        ],
        StepsMap::ACOES_DE_VERIFICACAO => [
            ['state_id' => StatesMap::PLANO_DE_CONTROLE_DE_QUALIDADE, 'type' => StatesMap::INPUT],
            ['state_id' => StatesMap::RELATORIO_DE_RESULTADOS_DO_LOTE_2, 'type' => StatesMap::INPUT],
            ['state_id' => StatesMap::RELATORIO_DOS_RESULTADOS_DE_VERIFICACAO_2, 'type' => StatesMap::OUTPUT],
        ],
        StepsMap::RESULTADOS_ACEITAVEIS_EDR => [
            ['state_id' => StatesMap::RELATORIO_DOS_RESULTADOS_DE_VERIFICACAO_2, 'type' => StatesMap::INPUT],
            ['state_id' => StatesMap::RELATORIO_DE_RESULTADOS_DO_LOTE_2, 'type' => StatesMap::INPUT],
            ['state_id' => StatesMap::ATA_DE_REUNIAO_DE_CHECKPOINT, 'type' => StatesMap::OUTPUT],
        ],
        StepsMap::ATUALIZACOES_E_CORRECOES_EDR => [
            ['state_id' => StatesMap::RELATORIO_DOS_RESULTADOS_DE_VERIFICACAO_2, 'type' => StatesMap::INPUT],
            ['state_id' => StatesMap::RELATORIO_DE_RESULTADOS_DO_LOTE_2, 'type' => StatesMap::INPUT],
            ['state_id' => StatesMap::PLANO_DE_ACOES_DE_CORRECAO, 'type' => StatesMap::OUTPUT],
            ['state_id' => StatesMap::DOCUMENTOS_ADICIONAIS, 'type' => StatesMap::OUTPUT],
        ],
        StepsMap::QUALIFICACAO_DO_FORNECEDOR => [
            ['state_id' => StatesMap::HISTORICO_DE_RISCO_DO_FORNECEDOR, 'type' => StatesMap::INPUT],
            ['state_id' => StatesMap::RELATORIO_DE_DESENVOLVIMENTO_DO_FORNECEDOR, 'type' => StatesMap::INPUT],
            ['state_id' => StatesMap::CONTRATO_DE_SERVICOS_PRESTADOS, 'type' => StatesMap::OUTPUT],
            ['state_id' => StatesMap::RELATORIO_DE_QUALIFICACAO_DO_FORNECEDOR, 'type' => StatesMap::OUTPUT],
        ],
        StepsMap::PLANO_DE_CONTROLE_DE_MANUFATURA => [
            ['state_id' => StatesMap::ESPECIFICACOES_PRELIMINARES, 'type' => StatesMap::INPUT],
            ['state_id' => StatesMap::RELATORIO_DE_RESULTADOS_DO_LOTE_2, 'type' => StatesMap::INPUT],
            ['state_id' => StatesMap::RELATORIO_DOS_RESULTADOS_DE_VERIFICACAO_2, 'type' => StatesMap::INPUT],
            ['state_id' => StatesMap::PLANO_DE_CONTROLE_DE_MANUFATURA, 'type' => StatesMap::OUTPUT],
        ],
        StepsMap::SOLICITACAO_DO_LOTE_3 => [
            ['state_id' => StatesMap::PLANO_DE_CONTROLE_DE_MANUFATURA, 'type' => StatesMap::INPUT],
            ['state_id' => StatesMap::SOLICITACAO_COM_O_FORNECEDOR, 'type' => StatesMap::OUTPUT],
            ['state_id' => StatesMap::PRAZO_DE_ENTREGA_DA_SOLICITACAO, 'type' => StatesMap::OUTPUT],
        ],
        StepsMap::LOTE_3 => [
            ['state_id' => StatesMap::PRAZO_DE_ENTREGA_DA_SOLICITACAO, 'type' => StatesMap::INPUT],
            ['state_id' => StatesMap::RESPONSAVEL_DA_ENGENHARIA, 'type' => StatesMap::OUTPUT],
            ['state_id' => StatesMap::RESPONSAVEL_DA_QUALIDADE, 'type' => StatesMap::OUTPUT],
            ['state_id' => StatesMap::RESPONSAVEL_DA_FÁBRICA, 'type' => StatesMap::OUTPUT],
            ['state_id' => StatesMap::RELATORIO_DE_RESULTADOS_DO_LOTE_3, 'type' => StatesMap::OUTPUT],
        ],
        StepsMap::ACOES_DE_VALIDACAO => [
            ['state_id' => StatesMap::PLANO_DE_CONTROLE_DE_MANUFATURA, 'type' => StatesMap::INPUT],
            ['state_id' => StatesMap::RELATORIO_DE_RESULTADOS_DO_LOTE_3, 'type' => StatesMap::INPUT],
            ['state_id' => StatesMap::RELATORIO_DE_RESULTADOS_DE_VALIDACAO, 'type' => StatesMap::OUTPUT],
        ],
        StepsMap::RESULTADOS_ACEITAVEIS_QER => [
            ['state_id' => StatesMap::RELATORIO_DE_RESULTADOS_DE_VALIDACAO, 'type' => StatesMap::INPUT],
            ['state_id' => StatesMap::RELATORIO_DE_RESULTADOS_DO_LOTE_3, 'type' => StatesMap::INPUT],
            ['state_id' => StatesMap::ATA_DE_REUNIAO_DE_CHECKPOINT, 'type' => StatesMap::OUTPUT],
        ],
        StepsMap::ATUALIZACOES_E_CORRECOES_QER => [
            ['state_id' => StatesMap::RELATORIO_DE_RESULTADOS_DE_VALIDACAO, 'type' => StatesMap::INPUT],
            ['state_id' => StatesMap::RELATORIO_DE_RESULTADOS_DO_LOTE_3, 'type' => StatesMap::INPUT],
            ['state_id' => StatesMap::PLANO_DE_ACOES_DE_CORRECAO, 'type' => StatesMap::OUTPUT],
            ['state_id' => StatesMap::DOCUMENTOS_ADICIONAIS, 'type' => StatesMap::OUTPUT],
        ]
    ];
    /**
     * Run the database seeds.
     *
     * Inserts one pivot row per (step, state, direction) triple above.
     *
     * @return void
     */
    public function run()
    {
        foreach ($this->map as $stepId => $states) {
            foreach ($states as $state) {
                StepStatesMap::create([
                    'step_id' => $stepId,
                    'state_id' => $state['state_id'],
                    'type' => $state['type']
                ]);
            }
        }
    }
}
<file_sep><?php
namespace App\View\Components;
use Illuminate\View\Component;
/**
 * Class SidebarItem
 *
 * Blade component for a single navigation entry, rendered by the
 * components.sidebar-item view.
 *
 * @package App\View\Components
 */
class SidebarItem extends Component
{
    /**
     * @var bool whether this entry is the currently active page
     */
    public $active;
    /**
     * @var string icon identifier shown next to the label
     */
    public $icon;
    /**
     * @var string visible label of the entry
     */
    public $name;
    /**
     * @var string target URL of the entry — presumably; confirm in the view
     */
    public $url;
    /**
     * Create a new component instance.
     *
     * @param $active
     * @param $icon
     * @param $name
     * @param $url
     */
    public function __construct($active, $icon, $name, $url)
    {
        $this->active = $active;
        $this->icon = $icon;
        $this->name = $name;
        $this->url = $url;
    }
    /**
     * Get the view / contents that represent the component.
     *
     * @return \Illuminate\View\View|string
     */
    public function render()
    {
        return view('components.sidebar-item');
    }
}
<file_sep><?php
/** @var \Illuminate\Database\Eloquent\Factory $factory */
use App\Item;
use App\Project;
use Faker\Generator as Faker;
// Factory for Item test fixtures.
$factory->define(Item::class, function (Faker $faker) {
    // e.g. "QWE_42": three random uppercase letters plus a random number.
    $code = implode('', $faker->randomElements(range('A', 'Z'), 3))
        . '_' . implode('', $faker->randomElements(range(1, 100)));

    return [
        'code' => $code,
        'description' => $faker->text(),
        'status_id' => 1,
        // Fix: the column stores a foreign key, not a whole model —
        // Project::first() alone would be serialized into the id column.
        // Scalar ids match the sibling Project factory ('user_id' => 1).
        'project_id' => Project::first()->id
    ];
});
<file_sep><?php
use App\Type;
use Illuminate\Database\Seeder;
/**
 * Seeds the fixed catalogue of item types.
 */
class TypesSeeder extends Seeder
{
    /** @var string[] type names inserted on seeding */
    private $types = [
        'Elétrico',
        'Embalagem',
        'Documento',
        'Metálico',
        'Não-Metálico',
        'Ceramico',
        'Polimérico',
        'Compósito'
    ];

    /**
     * Run the database seeds.
     *
     * @return void
     */
    public function run()
    {
        foreach ($this->types as $typeName) {
            Type::create(['name' => $typeName]);
        }
    }
}
<file_sep><?php
namespace App;
use Illuminate\Database\Eloquent\Model;
// A single checklist activity attached to a step instance.
class Activity extends Model
{
    // Mass-assignable attributes.
    protected $fillable = [
        'description',
        'checked',
        'step_id'
    ];
    // Default checklist (Portuguese, user-facing — do not translate) seeded
    // for each step template when a step is created; see Stepservice::create().
    // Keys are StepsMap template ids, values are activity descriptions.
    public static array $stepActivitiesMap = [
        StepsMap::ESPECIFICACOES_PRELIMINARES => [
            'Criar documentação técnica, desenho técnico, processo produtivo, ferramentas, equipamentos e matéria prima.',
            'Se necessário, descrever processo produtivo, suas etapas, ferramentas/equipamentos, tempos e materiais utilizados.',
            'Determinar tolerâncias dimensionais e geométricas.',
            'Realizar simulações de CAE para avaliar parâmetros.'
        ],
        StepsMap::PLANO_DE_CONTROLE_DE_PROTOTIPO => [
            'Determinar os tipos de protótipos para os testes: Rapid Prototype, Model ou Physical Prototype.',
            'Selecionar finalidade da avaliação, o material e quantidade de amostras.',
            'Criar plano de controle do protótipo.'
        ],
        StepsMap::ANALISE_DO_FORNECEDOR => [
            'Enviar documentação necessária para avaliação do fornecedor.',
            'Requisitar de RQF',
            'Realizar análise de riscos da empresa.',
            'Fazer análise de DFM do fornecedor.',
            'Incluir análise de localização com aspectos de estoque em trânsito e estoque de segurança.',
            'Verificar do possível histórico de fornecimento.'
        ],
        StepsMap::SOLICITACAO_DO_LOTE_1 => [
            'Definir qual a dimensão do pedido que será solicitado.',
            'Analisar o orçamento imposto pelo fornecedor.',
            'Negociar a data prevista para a entrega da solicitação.',
        ],
        StepsMap::LOTE_1 => [
            'Selecionar os responsáveis de cada área que irão acompanhar o lote.',
            'Analisar os pontos importantes, visando ajustes futuros.',
            'Pontuar características importantes relacionada a avaliação do design do produto.',
            'Elaborar um relatório de resultados.'
        ],
        StepsMap::RESULTADOS_ACEITAVEIS_ECR => [
            'Realizar reunião de debate do fim de etapa.',
            'Analisar todos resultados obtidos até então.',
            'Julgar o prosseguimento ou não do projeto.'
        ],
        StepsMap::ATUALIZACOES_E_CORRECOES_ECR => [
            'Determinar ações necessárias conforme discussão realizada anteriormente.',
            'Criar plano de ações para ser executado.',
            'Definir quais etapas deverão ser retrabalhadas.'
        ],
        StepsMap::PLANO_DE_CONTROLE_DE_QUALIDADE => [
            'Definir as propriedades que devem ser avaliadas.',
            'Estipular cronograma, amostras e executores dos testes.',
            'Utilizar diagrama-P para realizar o controle dos parâmetros.',
            'Determinar os melhores testes para obter os melhores resultados.',
            'Criar diagrama de causa e efeito para encontrar e mapear as principais falhas encontradas. ',
            'Elaborar um plano de controle de qualidade com todas as informações necessárias. '
        ],
        StepsMap::DESENVOLVIMENTO_DA_FERRAMENTA => [
            'Realizar o projeto informacional e preliminar da ferramenta/equipamento.',
            'Criar um cronograma de prazos para entregas e avaliações.',
            'Entrega do projeto detalhado.',
            'Receber relatório de desenvolvimento do fornecedor.'
        ],
        StepsMap::SOLICITACAO_DO_LOTE_2 => [
            'Definir qual a dimensão do pedido que será solicitado.',
            'Analisar o orçamento imposto pelo fornecedor.',
            'Negociar a data prevista para a entrega da solicitação.'
        ],
        StepsMap::LOTE_2 => [
            'Selecionar os responsáveis de cada área que irão acompanhar o lote.',
            'Analisar os pontos importantes, visando ajustes futuros.',
            'Pontuar características importantes relacionada a avaliação do processo do produto.',
            'Elaborar um relatório de resultados.'
        ],
        StepsMap::ACOES_DE_VERIFICACAO => [
            'Organizar o cronograma de testes que devem ser realizados.',
            'Determinar os executores de cada um destes testes selecionados anteriormente.',
            'Utilizar o método de inspeção para verificar o recebimento das amostras.',
            'Utilizar o método de testes para avaliar os pontos necessários estipulados no plano de controle de qualidade.',
            'Analisar os resultados obtidos com as ações.',
            'Criar relatório de verificação dos componentes.'
        ],
        StepsMap::RESULTADOS_ACEITAVEIS_EDR => [
            'Realizar reunião de debate do fim de etapa.',
            'Analisar todos resultados obtidos até então.',
            'Julgar o prosseguimento ou não do projeto.',
        ],
        StepsMap::ATUALIZACOES_E_CORRECOES_EDR => [
            'Determinar ações necessárias conforme discussão realizada anteriormente.',
            'Criar plano de ações para ser executado.',
            'Definir quais etapas deverão ser retrabalhadas.'
        ],
        StepsMap::QUALIFICACAO_DO_FORNECEDOR => [
            'Finalizar o contrato que será estabelecido com o fornecedor.',
            'Realizar uma avaliação completa da empresa em questão conforme parâmetros tradicionalmente estabelecidos.',
            'Atualizar, caso preciso, o histórico do fornecedor para consultas futuras.',
        ],
        StepsMap::PLANO_DE_CONTROLE_DE_MANUFATURA => [
            'Determinar testes e critérios de análises para serem validados.',
            'Estipular cronograma destas atividades.',
            'Se necessário, compor uma Bill of Manufacture (BOMfr).',
            'Criar um plano de controle de manufatura para o item em questão.'
        ],
        StepsMap::SOLICITACAO_DO_LOTE_3 => [
            'Definir qual a dimensão do pedido que será solicitado.',
            'Analisar o orçamento imposto pelo fornecedor.',
            'Negociar a data prevista para a entrega da solicitação.'
        ],
        StepsMap::LOTE_3 => [
            'Selecionar os responsáveis de cada área que irão acompanhar o lote.',
            'Analisar os pontos importantes, visando ajustes futuros.',
            'Pontuar características importantes relacionada a validação do processo e do produto na linha de produção.',
            'Elaborar um relatório de resultados.'
        ],
        StepsMap::ACOES_DE_VALIDACAO => [
            'Organizar o cronograma de testes que devem ser realizados.',
            'Determinar os executores de cada um destes testes selecionados anteriormente.',
            'Utilizar o método de análise por meio de softwares de CAM e Lean Simulation.',
            'Selecionar testes de validação: Teste de uso, Teste de vida, Teste de ambiente, Teste de fiabilidade e Teste de embalagem.',
            'Analisar os resultados obtidos com as ações.',
            'Criar relatório de validação dos componentes.'
        ],
        StepsMap::RESULTADOS_ACEITAVEIS_QER => [
            'Realizar reunião de debate do fim de etapa.',
            'Analisar todos resultados obtidos até então.',
            'Julgar a finalização e liberação do item para produção.'
        ],
        StepsMap::ATUALIZACOES_E_CORRECOES_QER => [
            'Determinar ações necessárias conforme discussão realizada anteriormente.',
            'Criar plano de ações para ser executado.',
            'Definir quais etapas deverão ser retrabalhadas.'
        ]
    ];
}
<file_sep><?php
use App\Nature;
use Illuminate\Database\Seeder;
/**
 * Seeds the fixed catalogue of item natures (procurement kinds).
 */
class NaturesSeeder extends Seeder
{
    /** @var string[] nature names inserted on seeding */
    private $natures = [
        'Comprado Padronizado',
        'Manufaturado Interno',
        'Manufaturado Externo'
    ];

    /**
     * Run the database seeds.
     *
     * @return void
     */
    public function run()
    {
        foreach ($this->natures as $natureName) {
            Nature::create(['name' => $natureName]);
        }
    }
}
<file_sep><?php
namespace App;
use Illuminate\Database\Eloquent\Model;
use Illuminate\Database\Eloquent\Relations\BelongsTo;
use Illuminate\Database\Eloquent\Relations\HasMany;
use Illuminate\Database\Eloquent\SoftDeletes;
/**
 * Class Project
 *
 * A project owned by a user that groups many items. Uses soft deletes;
 * deleting a project cascades a delete to its items (see boot()).
 *
 * @package App
 */
class Project extends Model
{
    use SoftDeletes;
    /**
     * @var string[] mass-assignable attributes
     */
    protected $fillable = [
        'code',
        'name',
        'user_id',
        'responsible'
    ];
    /**
     * Owning user.
     *
     * @return BelongsTo
     */
    public function user(): BelongsTo
    {
        return $this->belongsTo(User::class);
    }
    /**
     * Items registered under this project.
     *
     * @return HasMany
     */
    public function items(): HasMany
    {
        return $this->hasMany(Item::class, 'project_id');
    }
    /**
     * Cascade deletion: removing a project also deletes its items.
     */
    protected static function boot()
    {
        parent::boot();
        static::deleted(static function (Project $project) {
            $project->items()->delete();
        });
    }
}
<file_sep><?php
namespace App\Repositories;
use Illuminate\Database\Eloquent\Model;
/**
 * Contract for Eloquent-backed CRUD repositories.
 */
interface RepositoryInterface
{
    /** Fetch every record. */
    public function all();
    /** Persist a new record from mass-assignable attributes. */
    public function create(array $data);
    /** Apply $data to an existing record. */
    public function update(Model $resource, array $data);
    /** Delete the given record. */
    public function delete(Model $resource);
}
<file_sep><?php
namespace App;
use Illuminate\Database\Eloquent\Model;
// Pivot model linking a step template to a state template and recording
// whether the state is an input ('I') or output ('O') of that step.
class StepStatesMap extends Model
{
    // Explicit name: Laravel's convention would otherwise pluralize the class name.
    protected $table = 'steps_states_map';
    // Mass-assignable attributes.
    protected $fillable = [
        'step_id',
        'state_id',
        'type'
    ];
    // State template this pivot row refers to.
    public function stateInformation()
    {
        return $this->belongsTo(StatesMap::class, 'state_id');
    }
    // NOTE(review): the seeder stores StepsMap ids in step_id and the
    // migration FK references steps_maps, yet this relation targets Step —
    // confirm the intended target model.
    public function stepInformation()
    {
        return $this->belongsTo(Step::class, 'step_id');
    }
}
<file_sep><?php
namespace App;
use Illuminate\Database\Eloquent\Model;
use Illuminate\Database\Eloquent\Relations\BelongsTo;
/**
 * Class PreviousSteps
 *
 * Edge of the workflow dependency graph: records that one step instance
 * must be completed before another (see Stepservice::setupWorkflow()).
 *
 * @package App
 */
class PreviousSteps extends Model
{
    /**
     * @var string[] mass-assignable attributes
     */
    protected $fillable = [
        'step_id',
        'previous_step_id'
    ];
    /**
     * The dependent step (the one being unlocked).
     *
     * @return BelongsTo
     */
    public function step(): BelongsTo
    {
        return $this->belongsTo(Step::class, 'step_id');
    }
    /**
     * The prerequisite step that must be done first.
     *
     * @return BelongsTo
     */
    public function previousStep(): BelongsTo
    {
        return $this->belongsTo(Step::class, 'previous_step_id');
    }
}
<file_sep><?php
namespace App\Repositories;
use App\Item;
use Illuminate\Database\Eloquent\Model;
/**
 * Class ItemsRepository
 *
 * Eloquent-backed CRUD repository for Item records.
 *
 * @package App\Repositories
 */
class ItemsRepository implements RepositoryInterface
{
    /**
     * Fetch every item.
     *
     * @return Item[]|\Illuminate\Database\Eloquent\Collection
     */
    public function all()
    {
        return Item::query()->get();
    }

    /**
     * Persist a new item.
     *
     * @param array $data
     * @return mixed
     */
    public function create(array $data)
    {
        return Item::query()->create($data);
    }

    /**
     * Apply $data to the given item.
     *
     * @param Model $resource
     * @param array $data
     * @return mixed
     */
    public function update(Model $resource, array $data)
    {
        return $resource->update($data);
    }

    /**
     * Delete the given item.
     *
     * @param Model $resource
     * @return bool|null
     * @throws \Exception
     */
    public function delete(Model $resource)
    {
        return $resource->delete();
    }
}
<file_sep><?php
namespace App\Http\Controllers;
use App\Services\Stepservice;
use App\Step;
use Illuminate\Contracts\Foundation\Application;
use Illuminate\Contracts\View\Factory;
use Illuminate\Http\RedirectResponse;
use Illuminate\Http\Request;
use Illuminate\Http\Response;
use Illuminate\Support\MessageBag;
use Illuminate\View\View;
/**
 * HTTP endpoints for editing a single workflow step.
 */
class StepsController extends Controller
{
    /** @var Stepservice */
    private Stepservice $stepService;

    /**
     * StepsController constructor.
     *
     * @param Stepservice $stepService
     */
    public function __construct(Stepservice $stepService)
    {
        $this->stepService = $stepService;
    }

    /**
     * Show the form for editing the specified resource.
     *
     * @param Step $step
     * @return Application|Factory|View
     */
    public function edit(Step $step)
    {
        return view('steps.form', [
            'step' => $step,
            'hidePanelHeader' => true,
        ]);
    }

    /**
     * Update the specified resource in storage.
     *
     * @param Request $request
     * @param Step $step
     * @return RedirectResponse
     */
    public function update(Request $request, Step $step)
    {
        $redirect = redirect()->route('items.workflow', ['item' => $step->item->id]);

        if ($this->stepService->update($step, $request)) {
            return $redirect->with('success', 'Etapa atualizada com sucesso');
        }

        return $redirect->with('errors', new MessageBag([
            'Não é permitido editar o item se as etapas anteriores não forem cumpridas'
        ]));
    }
}
<file_sep><?php
namespace App\Services;
use Illuminate\Database\Eloquent\Model;
/**
 * Contract for the application service layer (CRUD orchestration).
 */
interface ServiceInterface
{
    /** Fetch every record. */
    public function all();
    /** Create a record from mass-assignable attributes. */
    public function create(array $data);
    /** Apply $data to a record ($data type is implementation-defined: array or Request). */
    public function update(Model $resource, $data);
    /** Delete the given record. */
    public function delete(Model $model);
}
<file_sep><?php
use Illuminate\Database\Migrations\Migration;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Support\Facades\Schema;
// Migration for the steps_states_map pivot table linking step templates
// (steps_maps) to state templates (states_maps).
class CreateStepsStatesMapTable extends Migration
{
    /**
     * Run the migrations.
     *
     * @return void
     */
    public function up()
    {
        Schema::create('steps_states_map', static function (Blueprint $table) {
            $table->id();
            $table->unsignedBigInteger('step_id');
            $table->unsignedBigInteger('state_id');
            // Single char: StatesMap::INPUT ('I') or StatesMap::OUTPUT ('O'),
            // as written by StepsStatesMapSeeder.
            $table->char('type');
            $table->foreign('step_id')->references('id')->on('steps_maps');
            $table->foreign('state_id')->references('id')->on('states_maps');
            $table->timestamps();
        });
    }
    /**
     * Reverse the migrations.
     *
     * @return void
     */
    public function down()
    {
        Schema::dropIfExists('steps_states_map');
    }
}
<file_sep><?php
/** @var \Illuminate\Database\Eloquent\Factory $factory */
use App\Project;
use Faker\Generator as Faker;
// Factory for Project test fixtures.
$factory->define(Project::class, function (Faker $faker) {
    // e.g. "ABC_7": three random uppercase letters plus a random number.
    $code = implode('', $faker->randomElements(range('A', 'Z'), 3))
        . '_' . implode('', $faker->randomElements(range(1, 100)));

    // Consistency fix: short array syntax [] instead of the legacy array()
    // construct, matching the sibling Item factory in this directory.
    return [
        'name' => $faker->company,
        'code' => $code,
        'user_id' => 1,
        'responsible' => $faker->name
    ];
});
<file_sep><?php
namespace App\Services;
use App\Activity;
use App\File;
use App\Item;
use App\PreviousSteps;
use App\Repositories\StepRepository;
use App\State;
use App\StatesMap;
use App\Step;
use App\StepsMap;
use Illuminate\Database\Eloquent\Collection;
use Illuminate\Database\Eloquent\Model;
use Illuminate\Http\Request;
use Illuminate\Support\Facades\Storage;
use Symfony\Component\HttpFoundation\File\UploadedFile;
use Symfony\Component\HttpFoundation\FileBag;
/**
* Class Stepservice
* @package App\Services
*/
class Stepservice implements ServiceInterface
{
/**
 * @var StepRepository persistence layer for Step records
 */
private StepRepository $stepRepository;
/**
 * Stepservice constructor.
 *
 * @param StepRepository $stepRepository injected repository dependency
 */
public function __construct(StepRepository $stepRepository)
{
    $this->stepRepository = $stepRepository;
}
/**
 * Every step of every item, unfiltered.
 *
 * @return Step[]|Collection
 */
public function all()
{
    $steps = $this->stepRepository->all();

    return $steps;
}
/**
 * Create a step and materialise its checklist activities and state slots.
 *
 * @param array $data
 * @return Step
 */
public function create(array $data): Step
{
    $step = $this->stepRepository->create($data);

    // Seed the default checklist mapped for this step template, if any.
    $templateId = $step->stepInformation->id;
    foreach (Activity::$stepActivitiesMap[$templateId] ?? [] as $description) {
        Activity::create([
            'description' => $description,
            'checked' => false,
            'step_id' => $step->id
        ]);
    }

    // One State row per input/output slot defined for the template.
    foreach ($step->stepInformation->mappedStates as $mappedState) {
        State::create([
            'step_id' => $step->id,
            'state_map_id' => $mappedState->state_id
        ]);
    }

    return $step;
}
/**
 * Apply a form submission to a step: uploaded files, state values,
 * checklist activities, status/approver and comments. Values written to
 * propagating input slots are copied to sibling steps of the same item.
 *
 * @param Step $resource step being edited
 * @param Request $data raw HTTP request (state ids are numeric field names)
 * @return bool false when the step is locked (unmet prerequisites)
 */
public function update(Model $resource, $data): bool
{
    // Locked steps (unfinished predecessors) are never editable.
    if ($resource->isLocked()) {
        return false;
    }
    $this->uploadFiles($data->files, $resource);
    // handle other kinds of inputs
    foreach ($data->all() as $state_id => $input) {
        // Only numeric field names address State rows; everything else
        // (status, comments, activity_*) is handled further down.
        if (!is_numeric($state_id)) {
            continue;
        }
        $state = State::find($state_id);
        if ($state === null) {
            continue;
        }
        $state->update([
            'value' => $input
        ]);
        // now propagate
        // @todo: Refactor to DRY with files propagation
        // take item's steps.
        $itemSteps = $resource->item->steps;
        /* @var Step $itemStep */
        foreach ($itemSteps as $itemStep) {
            if ($itemStep->id === $resource->id) {
                continue;
            }
            // Same state template on a sibling step of this item.
            $stateToUpdate = $itemStep->states()->where('state_map_id', $state->state_map_id)->first();
            if ($stateToUpdate === null) {
                continue;
            }
            $stateToUpdateInformation = $stateToUpdate
                ->stateStepInformation($stateToUpdate->step->step_map_id)
                ->first();
            if ($stateToUpdateInformation === null) {
                continue;
            }
            // Only copy into slots flagged as propagating inputs.
            if ($stateToUpdate->stateInformation->should_propagate
                && $stateToUpdateInformation->type === StatesMap::INPUT) {
                $stateToUpdate->update([
                    'value' => $input
                ]);
            }
        }
    }
    $this->handleActivities($data->all());
    if ($data->has('status')) {
        // Denying a checkpoint step (ECR/EDR/QER) denies its whole phase.
        if ($this->isADecisionPoint($resource) && (int)$data->get('status') === Step::DENIED) {
            /* @var Collection $steps */
            $steps = $resource->item->steps;
            $this->deny($steps, $resource);
        }
        $this->stepRepository->update($resource, [
            'status' => $data->get('status'),
            'approver' => $data->get('approver')
        ]);
    }
    if ($data->has('comments')) {
        $this->stepRepository->update($resource, [
            'comments' => $data->get('comments')
        ]);
    }
    return true;
}
/**
 * Whether the step is one of the three checkpoint gates (ECR/EDR/QER).
 *
 * @param Step $step
 * @return bool
 */
private function isADecisionPoint(Step $step): bool
{
    $gates = [
        StepsMap::RESULTADOS_ACEITAVEIS_QER,
        StepsMap::RESULTADOS_ACEITAVEIS_ECR,
        StepsMap::RESULTADOS_ACEITAVEIS_EDR,
    ];

    return in_array($step->stepInformation->id, $gates, true);
}
/**
 * Mark every step of the denied checkpoint's phase as DENIED.
 *
 * @param Collection $steps
 * @param Step $resource
 */
private function deny(Collection $steps, Step $resource): void
{
    foreach ($this->getPhaseSteps($steps, $resource) as $phaseStep) {
        /* @var Step $phaseStep */
        $phaseStep->update([
            'status' => Step::DENIED
        ]);
    }
}
/**
 * Steps belonging to the same phase as $resource, excluding the supplier
 * qualification step of the QER phase (which survives a denied gate).
 *
 * @param Collection $steps
 * @param Step $resource
 * @return Collection
 */
private function getPhaseSteps(Collection $steps, Step $resource): Collection
{
    $phase = $resource->stepInformation->phase;

    return $steps->filter(static function (Step $step) use ($phase) {
        $info = $step->stepInformation;
        if ($info->phase !== $phase) {
            return false;
        }

        return !($info->phase === 'QER' && $info->id === StepsMap::QUALIFICACAO_DO_FORNECEDOR);
    });
}
/**
 * Store each uploaded file under its state slot and propagate the file to
 * sibling steps whose matching slot is a propagating input.
 *
 * @param FileBag $files request files keyed by State id
 * @param Step $step step being edited
 */
private function uploadFiles(FileBag $files, Step $step): void
{
    /* @var UploadedFile $file */
    foreach ($files as $stateKey => $file) {
        $state = State::find($stateKey);
        // Prefix with the global file count to keep stored names unique.
        $path = File::all()->count() . '_' . $file->getClientOriginalName();
        //$file->move(storage_path('app/public'), $path);
        Storage::disk('public')->put($path, file_get_contents($file->getRealPath()));
        File::create([
            'state_id' => $state->id,
            'path' => $path
        ]);
        // take item's steps.
        $itemSteps = $step->item->steps;
        /* @var Step $itemStep */
        foreach ($itemSteps as $itemStep) {
            if ($itemStep->id === $step->id) {
                continue;
            }
            // Same state template on a sibling step of this item.
            $stateToUpdate = $itemStep->states()->where('state_map_id', $state->state_map_id)->first();
            if ($stateToUpdate === null) {
                continue;
            }
            $stateToUpdateInformation = $stateToUpdate
                ->stateStepInformation($stateToUpdate->step->step_map_id)
                ->first();
            if ($stateToUpdateInformation === null) {
                continue;
            }
            // Only attach the file to slots flagged as propagating inputs.
            if ($stateToUpdate->stateInformation->should_propagate &&
                $stateToUpdateInformation->type === StatesMap::INPUT) {
                File::create([
                    'state_id' => $stateToUpdate->id,
                    'path' => $path
                ]);
            }
        }
    }
}
/**
 * Mark checklist activities as done from submitted form data.
 *
 * Checkbox inputs are named "activity_<id>" and submit the value "on"
 * when ticked.
 *
 * @param array $data raw request payload (field name => value)
 */
private function handleActivities(array $data): void
{
    foreach ($data as $key => $value) {
        // Fix: require "activity_" as a prefix — stripos(...) !== false also
        // matched the marker anywhere inside an unrelated field name.
        if ($value !== 'on' || strpos($key, 'activity_') !== 0) {
            continue;
        }
        [, $activityId] = explode('_', $key);
        $activity = Activity::find($activityId);
        // Fix: skip stale/unknown ids instead of fataling on null->update().
        if ($activity !== null) {
            $activity->update([
                'checked' => true
            ]);
        }
    }
}
/**
 * Remove a step through the repository.
 *
 * @param Model $model
 * @return bool|null
 * @throws \Exception
 */
public function delete(Model $model)
{
    $repository = $this->stepRepository;

    return $repository->delete($model);
}
/**
 * Build workflow Structure
 *
 * Wires every step of the item to its predecessor(s) by creating
 * PreviousSteps rows according to the fixed phase flow
 * (ECR -> EDR -> QER). Steps without an entry in the map (e.g. the
 * first step, Especificações Preliminares) get no predecessors.
 *
 * @param Item $item
 */
public function setupWorkflow(Item $item)
{
    // step template id => templates of the steps that must precede it.
    // This table replaces the original 100+ line switch; the rows
    // created are identical.
    $previousStepsMap = [
        StepsMap::ANALISE_DO_FORNECEDOR => [StepsMap::ESPECIFICACOES_PRELIMINARES],
        StepsMap::PLANO_DE_CONTROLE_DE_PROTOTIPO => [StepsMap::ESPECIFICACOES_PRELIMINARES],
        StepsMap::SOLICITACAO_DO_LOTE_1 => [StepsMap::PLANO_DE_CONTROLE_DE_PROTOTIPO],
        StepsMap::LOTE_1 => [StepsMap::SOLICITACAO_DO_LOTE_1],
        StepsMap::RESULTADOS_ACEITAVEIS_ECR => [StepsMap::LOTE_1, StepsMap::ANALISE_DO_FORNECEDOR],
        StepsMap::PLANO_DE_CONTROLE_DE_QUALIDADE => [StepsMap::RESULTADOS_ACEITAVEIS_ECR],
        StepsMap::ATUALIZACOES_E_CORRECOES_ECR => [StepsMap::RESULTADOS_ACEITAVEIS_ECR],
        StepsMap::DESENVOLVIMENTO_DA_FERRAMENTA => [StepsMap::RESULTADOS_ACEITAVEIS_ECR],
        StepsMap::SOLICITACAO_DO_LOTE_2 => [
            StepsMap::DESENVOLVIMENTO_DA_FERRAMENTA,
            StepsMap::PLANO_DE_CONTROLE_DE_QUALIDADE
        ],
        StepsMap::ACOES_DE_VERIFICACAO => [StepsMap::SOLICITACAO_DO_LOTE_2],
        StepsMap::LOTE_2 => [StepsMap::SOLICITACAO_DO_LOTE_2],
        StepsMap::RESULTADOS_ACEITAVEIS_EDR => [StepsMap::LOTE_2, StepsMap::ACOES_DE_VERIFICACAO],
        StepsMap::ATUALIZACOES_E_CORRECOES_EDR => [StepsMap::RESULTADOS_ACEITAVEIS_EDR],
        StepsMap::QUALIFICACAO_DO_FORNECEDOR => [StepsMap::RESULTADOS_ACEITAVEIS_EDR],
        StepsMap::PLANO_DE_CONTROLE_DE_MANUFATURA => [StepsMap::QUALIFICACAO_DO_FORNECEDOR],
        StepsMap::SOLICITACAO_DO_LOTE_3 => [StepsMap::QUALIFICACAO_DO_FORNECEDOR],
        StepsMap::LOTE_3 => [StepsMap::SOLICITACAO_DO_LOTE_3],
        StepsMap::ACOES_DE_VALIDACAO => [
            StepsMap::PLANO_DE_CONTROLE_DE_MANUFATURA,
            StepsMap::SOLICITACAO_DO_LOTE_3
        ],
        StepsMap::RESULTADOS_ACEITAVEIS_QER => [StepsMap::LOTE_3, StepsMap::ACOES_DE_VALIDACAO],
        StepsMap::ATUALIZACOES_E_CORRECOES_QER => [StepsMap::RESULTADOS_ACEITAVEIS_QER],
        StepsMap::ITEM_LIBERADO_PARA_PRODUCAO => [StepsMap::RESULTADOS_ACEITAVEIS_QER],
    ];

    foreach ($item->steps()->get() as $step) {
        foreach ($previousStepsMap[$step->step_map_id] ?? [] as $previousStepMapId) {
            PreviousSteps::create([
                'step_id' => $step->id,
                'previous_step_id' => $item->step($previousStepMapId)->id
            ]);
        }
    }
}
}
<file_sep><?php
namespace App\Http\Controllers;
use App\Http\Requests\ItemRequest;
use App\Item;
use App\Project;
use App\Services\ItemsService;
use App\Status;
use Illuminate\Contracts\Foundation\Application;
use Illuminate\Contracts\View\Factory;
use Illuminate\Http\RedirectResponse;
use Illuminate\Http\Request;
use Illuminate\Http\Response;
use Illuminate\View\View;
/**
 * Class ItemsController
 *
 * Standard resourceful CRUD for Items plus the workflow views. All
 * business logic is delegated to ItemsService.
 *
 * @package App\Http\Controllers
 */
class ItemsController extends Controller
{
    // NOTE(review): this list mirrors the StepsMapSeeder ui_id values
    // (including the 'desenvolvimentp_da_ferramenta' misspelling, which
    // matches the seeder and must not be "fixed" independently). It is
    // not referenced anywhere in this class — presumably consumed by a
    // view or kept for reference; confirm before removing.
    private $states = [
        'especificacoes_preliminares',
        "plano_de_controle_de_prototipo",
        "analise_do_fornecedor",
        "solicitacao_do_lote_1",
        "lote_1",
        'resultados_aceitaveis_ecr',
        'atualizacoes_e_correcoes_ecr',
        'plano_de_controle_de_qualidade',
        'desenvolvimentp_da_ferramenta',
        'solicitacao_do_lote_2',
        'lote_2',
        'acoes_de_verificacao',
        'resultados_aceitaveis_edr',
        'atualizacoes_e_correcoes_edr',
        'qualificacao_do_fornecedor',
        'solicitacao_do_lote_3',
        'plano_de_controle_de_manufatura',
        'lote_3',
        'acoes_de_validacao',
        'resultados_aceitaveis_qer',
        'atualizacoes_e_correcoes_qer',
        'item_liberado_para_producao'
    ];

    /**
     * @var ItemsService
     */
    private ItemsService $itemsService;

    /**
     * ItemsController constructor.
     * @param ItemsService $itemsService
     */
    public function __construct(ItemsService $itemsService)
    {
        $this->itemsService = $itemsService;
    }

    /**
     * Display a listing of the resource.
     *
     * @return Application|Factory|Response|View
     */
    public function index()
    {
        $items = $this->itemsService->all();
        return view('items.index', compact('items'));
    }

    /**
     * Show the form for creating a new resource.
     *
     * @return Application|Factory|Response|View
     */
    public function create()
    {
        $statuses = Status::all();
        $projects = Project::all();
        return view('items.form', compact('statuses', 'projects'));
    }

    /**
     * Store a newly created resource in storage.
     *
     * @param ItemRequest $request
     * @return RedirectResponse
     */
    public function store(ItemRequest $request)
    {
        $this->itemsService->create($request->validated());
        return redirect()->to('items')->with('success', 'Item criado com sucesso');
    }

    /**
     * Display the specified resource.
     *
     * Reuses the edit form view; the form is pre-filled from $item.
     *
     * @param Item $item
     * @return Application|Factory|Response|View
     */
    public function show(Item $item)
    {
        $statuses = Status::all();
        $projects = Project::all();
        return view('items.form', compact('statuses', 'projects', 'item'));
    }

    /**
     * Show the form for editing the specified resource.
     *
     * @param Item $item
     * @return Application|Factory|Response|View
     */
    public function edit(Item $item)
    {
        $statuses = Status::all();
        $projects = Project::all();
        return view('items.form', compact('item', 'statuses', 'projects'));
    }

    /**
     * Update the specified resource in storage.
     *
     * @param ItemRequest $request
     * @param Item $item
     * @return RedirectResponse
     */
    public function update(ItemRequest $request, Item $item)
    {
        $this->itemsService->update($item, $request->validated());
        return redirect()->back()->with('success', 'Item atualizado com sucesso');
    }

    /**
     * Remove the specified resource from storage.
     *
     * @param Item $item
     * @return RedirectResponse
     * @throws \Exception
     */
    public function destroy(Item $item)
    {
        $this->itemsService->delete($item);
        return redirect()->back()->with('success', 'Item deletado com sucesso');
    }

    /**
     * View Item's workflow
     * @param Item $item
     * @return Application|Factory|View
     */
    public function viewWorkflow(Item $item)
    {
        $hidePanelHeader = true;
        return view('workflow.index', compact('item', 'hidePanelHeader'));
    }

    /**
     * Return the item (with whatever relations its serialization
     * includes) as JSON for the workflow front end.
     */
    public function getSteps(Item $item)
    {
        return response()->json($item);
    }
}
<file_sep><?php
namespace App;
use Illuminate\Database\Eloquent\Model;
use Illuminate\Database\Eloquent\Relations\BelongsTo;
/**
 * Class File
 *
 * An uploaded file attached to a single workflow state.
 *
 * @package App
 */
class File extends Model
{
    /**
     * Mass-assignable attributes.
     *
     * @var string[]
     */
    protected $fillable = ['path', 'state_id'];

    /**
     * State this file belongs to.
     *
     * @return BelongsTo
     */
    public function state(): BelongsTo
    {
        return $this->belongsTo(State::class, 'state_id');
    }
}
<file_sep><?php
namespace App;
use Illuminate\Database\Eloquent\Model;
use Illuminate\Database\Eloquent\Relations\BelongsTo;
use Illuminate\Database\Eloquent\Relations\HasMany;
use Illuminate\Database\Eloquent\SoftDeletes;
/**
 * Class Step
 *
 * One workflow step instance of an Item, built from a StepsMap template
 * (step_map_id). Steps are chained via PreviousSteps and grouped into
 * phases (ECR / EDR / QER), each with a decision point and an
 * "Atualizações e Correções" step.
 *
 * @package App
 */
class Step extends Model
{
    use SoftDeletes;

    /**
     * @var string[]
     */
    protected $fillable = [
        'item_id',
        'step_map_id',
        'approver',
        'status',
        'comments'
    ];

    /** Step has not been reviewed yet. */
    public const UNCHECKED = 0;

    /** Step was approved. */
    public const APPROVED = 1;

    /** Step was denied. */
    public const DENIED = 2;

    /**
     * Decision-point ("Resultados aceitáveis?") step template per phase.
     * Replaces the duplicated switch statements below.
     */
    private const PHASE_DECISION_POINTS = [
        'ECR' => StepsMap::RESULTADOS_ACEITAVEIS_ECR,
        'EDR' => StepsMap::RESULTADOS_ACEITAVEIS_EDR,
        'QER' => StepsMap::RESULTADOS_ACEITAVEIS_QER,
    ];

    /**
     * "Atualizações e Correções" step template per phase.
     */
    private const PHASE_UPDATES_AND_CORRECTIONS = [
        'ECR' => StepsMap::ATUALIZACOES_E_CORRECOES_ECR,
        'EDR' => StepsMap::ATUALIZACOES_E_CORRECOES_EDR,
        'QER' => StepsMap::ATUALIZACOES_E_CORRECOES_QER,
    ];

    /**
     * Links to the steps that must precede this one.
     *
     * @return HasMany
     */
    public function previous(): HasMany
    {
        return $this->hasMany(PreviousSteps::class, 'step_id');
    }

    /**
     * Whether this step is currently blocked from being worked on.
     *
     * A step is locked when its phase's decision point was denied and the
     * phase's "Atualizações e Correções" step is not yet approved (except
     * for the corrections step itself), or when any predecessor step is
     * still unchecked or denied. Corrections steps unlock only once their
     * predecessor (the decision point) has been denied.
     *
     * @return bool
     */
    public function isLocked(): bool
    {
        $phase = $this->stepInformation->phase ?? '';
        $phaseDecisionPoint = $this->findPhaseDecisionPoint($phase);
        $phaseUpdatesAndCorrections = $this->findPhaseUpdatesAndCorrections($phase);

        if ($phaseDecisionPoint !== null
            && $phaseDecisionPoint->getAttribute('status') === Step::DENIED
            && $phaseUpdatesAndCorrections !== null
            && $this->stepInformation->id !== $phaseUpdatesAndCorrections->stepInformation->id
            && $phaseUpdatesAndCorrections->status !== Step::APPROVED) {
            return true;
        }

        $previousSteps = $this->previous()->get();

        if (
            $this->stepInformation->id === StepsMap::ATUALIZACOES_E_CORRECOES_ECR
            || $this->stepInformation->id === StepsMap::ATUALIZACOES_E_CORRECOES_EDR
            || $this->stepInformation->id === StepsMap::ATUALIZACOES_E_CORRECOES_QER
        ) {
            // Corrections unlock only after the decision point was denied
            // (status ordering: UNCHECKED < APPROVED < DENIED).
            $previousStep = $previousSteps->first()->previousStep;

            return $previousStep->status < self::DENIED;
        }

        return $previousSteps->filter(static function (PreviousSteps $previousSteps) {
            return $previousSteps->previousStep->status === self::UNCHECKED
                || $previousSteps->previousStep->status === self::DENIED;
        })->count() > 0;
    }

    /**
     * Template row (name, phase, type) this step was built from.
     *
     * @return BelongsTo
     */
    public function stepInformation(): BelongsTo
    {
        return $this->belongsTo(StepsMap::class, 'step_map_id');
    }

    /**
     * @return HasMany
     */
    public function states(): HasMany
    {
        return $this->hasMany(State::class, 'step_id');
    }

    /**
     * @return HasMany
     */
    public function activities(): HasMany
    {
        return $this->hasMany(Activity::class, 'step_id');
    }

    /**
     * @return BelongsTo
     */
    public function item(): BelongsTo
    {
        return $this->belongsTo(Item::class, 'item_id');
    }

    /**
     * Decision-point step of the given phase within this step's item.
     *
     * @param string $phase
     * @return Step|null
     */
    private function findPhaseDecisionPoint(string $phase): ?Step
    {
        return $this->findStepOfSameItem(self::PHASE_DECISION_POINTS[$phase] ?? null);
    }

    /**
     * "Atualizações e Correções" step of the given phase within this
     * step's item.
     *
     * @param string $phase
     * @return Step|null
     */
    private function findPhaseUpdatesAndCorrections(string $phase): ?Step
    {
        return $this->findStepOfSameItem(self::PHASE_UPDATES_AND_CORRECTIONS[$phase] ?? null);
    }

    /**
     * Find the sibling step of this step's item with the given template id.
     *
     * @param int|null $stepMapId
     * @return Step|null
     */
    private function findStepOfSameItem(?int $stepMapId): ?Step
    {
        if ($stepMapId === null) {
            return null;
        }

        return self::where([
            'step_map_id' => $stepMapId,
            'item_id' => $this->getAttribute('item_id')
        ])->first();
    }
}
<file_sep><?php
use App\StatesMap;
use Illuminate\Database\Seeder;
class StatesMapSeeder extends Seeder
{
private $statesMap = [
[
'name' => 'Bill Of Material(BOM)',
'type' => StatesMap::FILE,
'is_mandatory' => true,
'should_propagate' => true
],
[
'name' => 'Estrutura Funcional do Produto',
'type' => StatesMap::FILE,
'is_mandatory' => true,
'should_propagate' => true
],
[
'name' => 'Especificações Preliminares',
'type' => StatesMap::FILE,
'is_mandatory' => true,
'should_propagate' => true
],
[
'name' => 'Etapas do Processo',
'type' => StatesMap::FILE,
'should_propagate' => true,
'is_mandatory' => false,
],
[
'name' => 'Outros Arquivos',
'type' => StatesMap::FILE,
'is_mandatory' => false,
'should_propagate' => false
],
[
'name' => 'Plano de Controle de Protótipo',
'type' => StatesMap::FILE,
'should_propagate' => true,
'is_mandatory' => true,
],
[
'name' => 'Histórico de Risco do Fornecedor',
'type' => StatesMap::FILE,
'should_propagate' => true,
'is_mandatory' => true,
],
[
'name' => 'Retorno do Fornecedor',
'type' => StatesMap::FILE,
'should_propagate' => true,
'is_mandatory' => false,
],
[
'name' => 'Solicitação com o Fornecedor',
'type' => StatesMap::FILE,
'should_propagate' => true,
'is_mandatory' => true,
],
[
'name' => 'Prazo de Entrega da Solicitação',
'type' => StatesMap::DATE,
'should_propagate' => true,
'is_mandatory' => true,
],
[
'name' => 'Responsável da Engenharia',
'type' => StatesMap::TEXT,
'should_propagate' => true,
'is_mandatory' => true,
],
[
'name' => 'Responsável da Qualidade',
'type' => StatesMap::TEXT,
'should_propagate' => true,
'is_mandatory' => true,
],
[
'name' => 'Responsável da Fábrica',
'type' => StatesMap::TEXT,
'should_propagate' => true,
'is_mandatory' => true,
],
[
'name' => 'Relatório de Resultados do Lote 1',
'type' => StatesMap::FILE,
'should_propagate' => true,
'is_mandatory' => true,
],
[
'name' => 'Ata de Reunião de Checkpoint',
'type' => StatesMap::FILE,
'should_propagate' => true,
'is_mandatory' => true,
],
[
'name' => 'Plano de Ações de Correção',
'type' => StatesMap::FILE,
'should_propagate' => true,
'is_mandatory' => true,
],
[
'name' => 'Documentos Adicionais',
'type' => StatesMap::FILE,
'should_propagate' => false,
'is_mandatory' => false,
],
[
'name' => 'Plano de Controle de Qualidade',
'type' => StatesMap::FILE,
'should_propagate' => true,
'is_mandatory' => true,
],
[
'name' => 'Relatório de Desenvolvimento do Fornecedor',
'type' => StatesMap::FILE,
'should_propagate' => true,
'is_mandatory' => true,
],
[
'name' => 'Relatório de Resultados do Lote 2',
'type' => StatesMap::FILE,
'should_propagate' => true,
'is_mandatory' => true,
],
[
'name' => 'Relatório dos Resultados de Verificação',
'type' => StatesMap::FILE,
'should_propagate' => true,
'is_mandatory' => true,
],
[
'name' => 'Contrato de Serviços Prestados',
'type' => StatesMap::FILE,
'should_propagate' => true,
'is_mandatory' => true,
],
[
'name' => 'Relatório de Qualificação do Fornecedor',
'type' => StatesMap::FILE,
'should_propagate' => true,
'is_mandatory' => true,
],
[
'name' => 'Plano de Controle de Manufatura',
'type' => StatesMap::FILE,
'should_propagate' => true,
'is_mandatory' => true,
],
[
'name' => 'Relatório de Resultados do Lote 3',
'type' => StatesMap::FILE,
'should_propagate' => true,
'is_mandatory' => true,
],
[
'name' => 'Relatório de Resultados de Validação',
'type' => StatesMap::FILE,
'should_propagate' => true,
'is_mandatory' => true,
],
];
/**
 * Run the database seeds.
 *
 * Inserts every predefined state template into the states map table,
 * in declaration order.
 *
 * @return void
 */
public function run()
{
    array_map(static function (array $state) {
        StatesMap::create($state);
    }, $this->statesMap);
}
}
<file_sep><?php
use App\Status;
use Illuminate\Database\Seeder;
class StatusesSeeder extends Seeder
{
    /**
     * Item status names, seeded in display order.
     *
     * @var string[]
     */
    private $statusNames = [
        'Criado',
        'Proc. Avaliação',
        'Real. Design',
        'Liberado',
    ];

    /**
     * Run the database seeds.
     *
     * @return void
     */
    public function run()
    {
        foreach ($this->statusNames as $name) {
            Status::create([
                'name' => $name
            ]);
        }
    }
}
<file_sep><?php
use Illuminate\Database\Seeder;
class DatabaseSeeder extends Seeder
{
    /**
     * Seed the application's database.
     *
     * Order matters: reference data (users, natures, types, projects,
     * statuses) must exist before the workflow map seeders run.
     *
     * @return void
     */
    public function run()
    {
        $seeders = [
            UserSeeder::class,
            NaturesSeeder::class,
            TypesSeeder::class,
            ProjectsSeeder::class,
            StatusesSeeder::class,
            // ItemsSeeder::class is intentionally disabled.
            StepsMapSeeder::class,
            StatesMapSeeder::class,
            StepsStatesMapSeeder::class,
        ];

        foreach ($seeders as $seederClass) {
            $this->call($seederClass);
        }
    }
}
<file_sep><?php
namespace App\Services;
use App\Project;
use App\Repositories\ProjectsRepository;
use Illuminate\Database\Eloquent\Model;
/**
 * Service layer for Project resources; delegates to ProjectsRepository.
 *
 * NOTE(review): the class name is misspelled ("Projetcs"), but it is
 * kept as-is because consumers reference it by this name (e.g. via
 * constructor injection) — renaming would break them.
 */
class ProjetcsService implements ServiceInterface
{
    /**
     * @var ProjectsRepository
     */
    private ProjectsRepository $projectsRepository;

    /**
     * @param ProjectsRepository $projectsRepository
     */
    public function __construct(ProjectsRepository $projectsRepository)
    {
        $this->projectsRepository = $projectsRepository;
    }

    /**
     * All projects.
     */
    public function all()
    {
        return $this->projectsRepository->all();
    }

    /**
     * Create a project from validated data.
     *
     * @param array $data
     */
    public function create(array $data)
    {
        return $this->projectsRepository->create($data);
    }

    /**
     * Update a project. Now returns the repository result for
     * consistency with all()/create() (previously it was discarded).
     *
     * @param Model $resource
     * @param mixed $data
     */
    public function update(Model $resource, $data)
    {
        return $this->projectsRepository->update($resource, $data);
    }

    /**
     * Delete a project; returns the repository result.
     *
     * @param Model $model
     */
    public function delete(Model $model)
    {
        return $this->projectsRepository->delete($model);
    }
}
<file_sep><?php
use App\StepsMap;
use Illuminate\Database\Seeder;
class StepsMapSeeder extends Seeder
{
private $states = [
[
'ui_id' => 'especificacoes_preliminares',
'name' => 'Especificacões Preliminares',
'phase' => 'ECR',
],
[
'ui_id' => "plano_de_controle_de_prototipo",
'name' => 'Plano de Controle de Protótipo',
'phase' => 'ECR',
],
[
'ui_id' => "analise_do_fornecedor",
'name' => 'Análise do Fornecedor',
'phase' => 'ECR',
],
[
'ui_id' => "solicitacao_do_lote_1",
'name' => 'Solicitação do Lote 1',
'phase' => 'ECR',
],
[
'ui_id' => "lote_1",
'name' => 'Lote 1',
'phase' => 'ECR',
],
[
'ui_id' => 'resultados_aceitaveis_ecr',
'name' => 'Resultados aceitáveis?',
'type' => 'D',
'phase' => 'ECR',
],
[
'ui_id' => 'atualizacoes_e_correcoes_ecr',
'name' => 'Atualizações e Correções',
'phase' => 'ECR',
],
[
'ui_id' => 'plano_de_controle_de_qualidade',
'name' => 'Plano de Controle de Qualidade',
'phase' => 'EDR',
],
[
'ui_id' => 'desenvolvimentp_da_ferramenta',
'name' => 'Desenvolvimento da Ferramenta',
'phase' => 'EDR',
],
[
'ui_id' => 'solicitacao_do_lote_2',
'name' => 'Solicitação do Lote 2',
'phase' => 'EDR',
],
[
'ui_id' => 'lote_2',
'name' => '<NAME>',
'phase' => 'EDR',
],
[
'ui_id' => 'acoes_de_verificacao',
'name' => 'Ações de Verificação',
'phase' => 'EDR',
],
[
'ui_id' => 'resultados_aceitaveis_edr',
'name' => 'Resultados Aceitáveis?',
'type' => 'D',
'phase' => 'EDR',
],
[
'ui_id' => 'atualizacoes_e_correcoes_edr',
'name' => 'Atualizações e Correções',
'phase' => 'EDR',
],
[
'ui_id' => 'qualificacao_do_fornecedor',
'name' => 'Qualificação do Fornecedor',
'phase' => 'QER',
],
[
'ui_id' => 'solicitacao_do_lote_3',
'name' => 'Solicitação do Lote 3',
'phase' => 'QER',
],
[
'ui_id' => 'plano_de_controle_de_manufatura',
'name' => 'Plano de Controle de Manufatura',
'phase' => 'QER',
],
[
'ui_id' => 'lote_3',
'name' => '<NAME>',
'phase' => 'QER',
],
[
'ui_id' => 'acoes_de_validacao',
'name' => 'Ações de Validação',
'phase' => 'QER',
],
[
'ui_id' => 'resultados_aceitaveis_qer',
'name' => 'Resultados Aceitáveis?',
'type' => 'D',
'phase' => 'QER',
],
[
'ui_id' => 'atualizacoes_e_correcoes_qer',
'name' => 'Atualizações e Correções',
'phase' => 'QER',
],
[
'ui_id' => 'item_liberado_para_producao',
'name' => 'Item Liberado para Produção',
'phase' => 'QER',
]
];
/**
 * Run the database seeds.
 *
 * Inserts every predefined step template into the steps map table,
 * in declaration order.
 *
 * @return void
 */
public function run()
{
    array_map(static function (array $state) {
        StepsMap::create($state);
    }, $this->states);
}
}
| 4b808e1c12a4a0646c63c25a6f04fcf8a36fed25 | [
"JavaScript",
"PHP"
] | 32 | PHP | josefelipetto/process-management-system | 06e7b14dd993cff1da7462938dc20e30c8b8488a | 5d460a365687b66c04ef9ab3788f5d876815b3d5 |
refs/heads/master | <repo_name>maxfieldEland/deep_learning<file_sep>/feature_extractor.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Load several pretrained ImageNet classifiers for use as feature extractors.

Created on Mon Feb 3 19:09:21 2020

@author: max
"""
# Cleaned up: `keras` and `numpy` were imported twice, and the
# Sequential/layer imports were unused in this module.
from keras.applications import vgg16, inception_v3, resnet50, mobilenet

# Each constructor downloads its ImageNet weights on first use.
# Load the VGG16 model
vgg_model = vgg16.VGG16(weights='imagenet')
# Load the Inception_V3 model
inception_model = inception_v3.InceptionV3(weights='imagenet')
# Load the ResNet50 model
resnet_model = resnet50.ResNet50(weights='imagenet')
# Load the MobileNet model
mobilenet_model = mobilenet.MobileNet(weights='imagenet')
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Jan 30 14:54:12 2020
@author: max
"""
import glob
import numpy as np
import imageio
from sklearn.model_selection import train_test_split
from skimage.transform import resize
def main():
    """Load images from per-category folders and return a train/test split.

    Expects the working directory to contain one subdirectory per class;
    every file in a category directory is read as an image, resized to
    224x224x3, and labelled with a one-hot vector.

    Returns:
        (X_train, X_test, y_train, y_test) as numpy arrays.
    """
    categories = sorted(glob.glob('*'))
    images = []
    labels = []
    for category in categories:
        for test_filename in glob.glob(category + '/*'):
            image = np.array(imageio.imread(test_filename))
            # Resize to the input shape expected by VGG-style networks.
            image = resize(image, (224, 224, 3))
            images.append(image)
            labels.append(category)

    # One-hot encode the labels.
    # BUG FIX: the vector length was hard-coded to 2 (np.zeros(2)), which
    # raised an IndexError with more than two category folders; size it by
    # the number of categories found on disk instead.
    oneHotEncoder = dict()
    for idx, category in enumerate(categories):
        encodedVec = np.zeros(len(categories))
        encodedVec[idx] = 1
        oneHotEncoder[category] = encodedVec

    encoded_labels = [oneHotEncoder[label] for label in labels]

    X_train, X_test, y_train, y_test = train_test_split(images, encoded_labels)
    return (np.array(X_train), np.array(X_test),
            np.array(y_train), np.array(y_test))
# deep_learning
Homework assignments for UVM's graduate-level Deep Learning course.
<file_sep>/vgg_model.py
"""Train a VGG-16 style CNN from scratch on the locally loaded image set."""
import keras
from keras.models import Sequential
from keras.layers import Dense, Activation, Dropout, Flatten
from keras.layers import Conv2D
from keras.layers import MaxPooling2D
import numpy as np
import load_data

# Load data into workspace.
# BUG FIX: `load_data` is a module, not a callable — calling it raised
# TypeError. The split is produced by load_data.main().
X_train, X_test, y_train, y_test = load_data.main()

input_shape = (224, 224, 3)

# VGG-16 architecture with a 2-way softmax head.
model = Sequential([
    Conv2D(64, (3, 3), input_shape=input_shape, padding='same', activation='relu'),
    Conv2D(64, (3, 3), activation='relu', padding='same'),
    MaxPooling2D(pool_size=(2, 2), strides=(2, 2)),
    Conv2D(128, (3, 3), activation='relu', padding='same'),
    Conv2D(128, (3, 3), activation='relu', padding='same',),
    MaxPooling2D(pool_size=(2, 2), strides=(2, 2)),
    Conv2D(256, (3, 3), activation='relu', padding='same',),
    Conv2D(256, (3, 3), activation='relu', padding='same',),
    Conv2D(256, (3, 3), activation='relu', padding='same',),
    MaxPooling2D(pool_size=(2, 2), strides=(2, 2)),
    Conv2D(512, (3, 3), activation='relu', padding='same',),
    Conv2D(512, (3, 3), activation='relu', padding='same',),
    Conv2D(512, (3, 3), activation='relu', padding='same',),
    MaxPooling2D(pool_size=(2, 2), strides=(2, 2)),
    Conv2D(512, (3, 3), activation='relu', padding='same',),
    Conv2D(512, (3, 3), activation='relu', padding='same',),
    Conv2D(512, (3, 3), activation='relu', padding='same',),
    MaxPooling2D(pool_size=(2, 2), strides=(2, 2)),
    Flatten(),
    Dense(4096, activation='relu'),
    Dense(4096, activation='relu'),
    Dense(2, activation='softmax')
])
model.summary()

# Compile and train the model, then report held-out accuracy.
model.compile(loss=keras.losses.categorical_crossentropy, optimizer='adam', metrics=['accuracy'])
model.fit(X_train, y_train)
model.evaluate(X_test, y_test, batch_size = 32)
| 94ccdb4e017e1c7627d564dab21752dc32a16b81 | [
"Markdown",
"Python"
] | 4 | Python | maxfieldEland/deep_learning | b5dcefedae3390cdeeb53fc9aad8feff5fedbd8d | 491591c6f4f3d57f6b678f14f43bc18474866799 |
refs/heads/master | <repo_name>TrevorNT/rpi-printqueue-ng<file_sep>/dev/printqueue_ng/html.py
# html.py
from printers.models import Area
def navbar(active_tab, last_update):
    """Build the Bootstrap navbar HTML.

    active_tab: Area.name of the page being rendered (falsy on the home
    page, which highlights no tab). last_update: preformatted timestamp
    string shown on the right, or falsy to omit it.

    Areas with id < 8 render as top-level tabs; areas with id >= 8 are
    grouped under an "Other Areas" dropdown (id == 8 opens the dropdown).
    NOTE(review): the dropdown's closing </ul></li> tags are emitted
    unconditionally, so with no id >= 8 areas stray closing tags would be
    produced — confirm against the seeded Area data.
    """
    return_value = """
<div class="navbar navbar-default navbar-static-top" role="navigation">
<div class="container">
<div class="navbar-header">
<button type="button" class="navbar-toggle" data-toggle="collapse" data-target=".navbar-collapse">
<span class="sr-only">Toggle navigation</span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand" href="/">HD Print Queue</a>
</div>
<div class="navbar-collapse collapse">
<ul class="nav navbar-nav">"""
    all_areas = Area.objects.all().order_by("id")
    # Tracks whether the active tab was one of the top-level tabs; if not,
    # the "Other Areas" dropdown itself is highlighted.
    active_is_a_main_tab = False
    if not active_tab:
        active_is_a_main_tab = True
    for area in all_areas:
        if area.id < 8:
            if active_tab == area.name:
                return_value += area.get_active_html()
                active_is_a_main_tab = True
            else:
                return_value += area.get_inactive_html()
        elif area.id == 8:
            # First "other" area: open the dropdown container.
            if not active_is_a_main_tab:
                return_value += "<li class='dropdown active'>"
            else:
                return_value += "<li class='dropdown'>"
            return_value += "<a href='#' class='dropdown-toggle' data-toggle='dropdown'>Other Areas <b class='caret'></b></a>"
            return_value += "<ul class='dropdown-menu'>"
            if active_tab == area.name:
                return_value += area.get_active_html()
            else:
                return_value += area.get_inactive_html()
        else:
            # Subsequent "other" areas go inside the open dropdown.
            if active_tab == area.name:
                return_value += area.get_active_html()
            else:
                return_value += area.get_inactive_html()
    # Close the dropdown menu and item, then the navbar list.
    return_value += "</ul>"
    return_value += "</li>"
    return_value += "</ul>"
    if last_update:
        return_value += """
<ul class="nav navbar-nav navbar-right">
<li><a href="#">Updated: %s</a></li>
</ul>""" % last_update
    return_value += """
</div>
</div>
</div>
"""
    return return_value
def footer():
    """Return the static page-footer HTML (credits and support contact)."""
    return """
<div id="footer">
<div class="container">
<p class="text-muted"><span>Created by <a href="mailto:<EMAIL>"><NAME></a>, for Rensselaer Polytechnic Institute.</span>
<span style="float: right;">Maintained by the <a href="http://helpdesk.rpi.edu/">VCC Help Desk</a> (x7777 or (518) 276-7777).</span></p>
</div>
</div>"""
<file_sep>/dev/printqueue_ng/updater.py
# updater.py
import paramiko
from printers.models import Area, Printer, Job
import datetime
import pytz
import re
SSH_SERVER = 'rcs-linux.rpi.edu'
SSH_USERNAME = 'so19'
SSH_PASSWORD = '<PASSWORD>'
# Customize these to your liking
UPDATE_INTERVAL_MINUTES = 2
UPDATE_INTERVAL_SECONDS = 0
def update(area):
if not type(area) is Area:
raise TypeError("area object is not of type printers.Area")
if area.last_updated + datetime.timedelta(minutes = UPDATE_INTERVAL_MINUTES, seconds = UPDATE_INTERVAL_SECONDS) > datetime.datetime.now(pytz.utc): # (That "None" means "not timezone aware". Long story. Leave that the way it is unless you can figure out how to preload the initial_data.json with timezone-aware data.)
return # It's not time to update yet
rcs_ssh = paramiko.SSHClient()
rcs_ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
rcs_ssh.connect(SSH_SERVER, username = SSH_USERNAME, password = <PASSWORD>)
all_printers = Printer.objects.filter(area = area)
for printer in all_printers:
inp, outp, err = rcs_ssh.exec_command("lpq -P " + printer.name)
output = outp.readlines()
if len(output) == 1 and "no entries" in output[0].lower():
Job.objects.filter(printer = printer).delete() # Statement evaluates to: get all jobs by this printer and then delete them.
if printer.state != Printer.OPERATIONAL:
printer.state = Printer.OPERATIONAL
printer.save()
elif len(output) == 1 and "printer not found" in output[0].lower():
Job.objects.filter(printer = printer).delete() # ...just in case. For the most part, if it's not in lpq anymore, it won't ever have any jobs.
printer.state = Printer.ERROR
printer.error_message = "The queue of this printer is not available."
printer.save()
elif len(output) >= 1 and "printererror: check printer" in output[0].lower():
Job.objects.filter(printer = printer).delete()
printer.state = Printer.ERROR
printer.error_message = "Printer is not printing. Please check the printer for an error message."
printer.save()
if len(output) >= 3 and output[1].lower().split() == ['rank', 'owner', 'job', 'files', 'total', 'size']:
parse_jobs(output[2:], printer)
elif len(output) >= 1 and "is down:" in output[0].lower():
Job.objects.filter(printer = printer).delete()
printer.state = Printer.ERROR
printer.error_message = output[0][(output[0].lower().find("down:") + 6):]
if printer.error_message.strip() == "":
printer.error_message = "Printer is down."
printer.save()
if len(output) >= 3 and output[1].lower().split() == ['rank', 'owner', 'job', 'files', 'total', 'size']:
parse_jobs(output[2:], printer)
elif len(output) >= 4 and output[2].lower().split() == ['rank', 'owner', 'job', 'files', 'total', 'size']:
parse_jobs(output[3:], printer)
elif len(output) >= 1 and "problems finding printer" in output[0].lower():
Jobs.objects.filter(printer = printer).delete()
printer.state = Printer.ERROR
printer.error_message = "Printer is experiencing network problems."
printer.save()
if len(output) >= 3 and output[1].lower().split() == ['rank', 'owner', 'job', 'files', 'total', 'size']:
parse_jobs(output[2:], printer)
elif len(output) >= 4 and output[2].lower().split() == ['rank', 'owner', 'job', 'files', 'total', 'size']:
parse_jobs(output[3:], printer)
elif len(output) >= 2 and output[1].lower().split() == ['rank', 'owner', 'job', 'files', 'total', 'size']:
if "status: busy" in output[0].lower():
if printer.state != Printer.OPERATIONAL:
printer.state = Printer.OPERATIONAL
printer.save()
else:
if printer.state != Printer.ERROR:
printer.state = Printer.ERROR
if output[0].lower().find("status: ") != 0:
printer.error_message = output[0][(output[0].lower().find("status: ") + 8):].strip()
else:
print "Unknown printer status condition:\n" + output[0]
printer.save()
if printer.state != Printer.OPERATIONAL:
printer.state = Printer.OPERATIONAL
printer.save()
Job.objects.filter(printer = printer).delete()
parse_jobs(output[2:], printer)
else:
print "Unexpected condition in lpq status report:"
for line in output:
print "\t" + line
area.save() # Just using save() should, in theory, update the last_updated time field in the database. In theory.
rcs_ssh.close()
def parse_jobs(jobs, printer):
    """Parse lpq queue lines into Job rows for the given printer.

    Each line has the form:
        <rank> <owner> <job#> <file name...> <size> bytes
    where <rank> is "active" for the job currently printing, else an
    ordinal like "1st", "2nd", ...
    """
    for job in jobs:
        output_tokens = job.split()
        newjob = Job()
        # Queue position: "active" maps to 0; ordinals keep their number.
        if "active" in output_tokens[0]:
            newjob.position = 0
        else:
            newjob.position = int(re.findall(r'\d+', output_tokens[0])[0])
        # Get the username
        newjob.user_id = output_tokens[1]
        # Get the job ID #
        newjob.job_number = int(output_tokens[2])
        # The name spans every token between the job number and the
        # trailing "<size> bytes" pair.
        # BUG FIX: tokens were concatenated with no separator, so
        # multi-word file names lost their spaces; join with spaces.
        newjob.name = " ".join(output_tokens[3:-2])
        # Identify the printer
        newjob.printer = printer
        # Finally, save the job to the db
        newjob.save()
<file_sep>/dev/printqueue_ng/views.py
# views.py
from django.http import HttpResponse, HttpResponseNotFound, Http404
from django.shortcuts import render
import printqueue_ng.html as pq_html
import printqueue_ng.updater as updater
from pytz import timezone
from printers.models import Area, Printer, Job
from django.db.models import Count
HEAVY_LOAD_STATE_THRESHOLD = 7
def home(request):
    """Render the landing page with the shared navbar/footer chrome."""
    context = {
        'navbar': pq_html.navbar(False, False),
        'footer': pq_html.footer(),
        'server': request.META['SERVER_NAME'] + ":8000",
    }
    return render(request, 'home.html', context)
def area(request, name):
    """Render the printer-status page for one Area.

    Looks up the Area by name (404 if unknown), triggers a rate-limited
    queue refresh, then hands the template each printer with its ordered
    jobs and a CSS state class.
    """
    this_area = None
    try:
        this_area = Area.objects.get(name = name)
    except:
        raise Http404
    updater.update(this_area)
    printers = []
    # Busiest printers first: annotate each printer with its job count.
    area_printers = Printer.objects.filter(area = this_area).annotate(num_jobs = Count('job')).order_by('-num_jobs')
    for printer in area_printers:
        printer_jobs = Job.objects.filter(printer = printer).order_by("position")
        # CSS state for the template: inactive (idle), error,
        # active (some jobs), heavy_load (>= HEAVY_LOAD_STATE_THRESHOLD).
        state = "inactive"
        if printer.state == 1:
            state = "error"
        elif len(printer_jobs) > 0 and len(printer_jobs) < HEAVY_LOAD_STATE_THRESHOLD:
            state = "active"
        elif len(printer_jobs) >= HEAVY_LOAD_STATE_THRESHOLD:
            state = "heavy_load"
        printers.append({'printer': printer, 'jobs': printer_jobs, 'state': state,})
    # Format the last-updated timestamp by hand (e.g. "3/14, 9:05 AM"),
    # converted to US/Eastern for display.
    update_time = this_area.last_updated.astimezone(timezone('US/Eastern'))
    hour = update_time.hour % 12
    minute = ""
    ampm = " AM"
    if hour == 0:
        hour = 12  # 12-hour clock: 0 and 12 both display as 12
    if update_time.hour >= 12:
        ampm = " PM"
    if update_time.minute < 10:
        minute = "0" + str(update_time.minute)
    else:
        minute = str(update_time.minute)
    update_time_string = str(update_time.month) + "/" + str(update_time.day) + ", " + str(hour) + ":" + str(minute) + ampm
    return render(request, 'area.html', {'navbar': pq_html.navbar(name, update_time_string), 'footer': pq_html.footer(), 'server': request.META['SERVER_NAME'] + ":8000", 'area': name, 'printers': printers,})
<file_sep>/dev/printers/models.py
from django.db import models
# Create your models here.
class Area(models.Model):
    """A campus area of printers; each area is one navbar entry."""
    name = models.CharField(max_length = 32)
    friendly_name = models.CharField(max_length = 128)
    last_updated = models.DateTimeField(auto_now = True, auto_now_add = True)

    def get_active_html(self):
        """Navbar markup when this area is the page being viewed."""
        return '<li class="active"><a href="/%s/">%s</a></li>' % (self.name, self.friendly_name)

    def get_inactive_html(self):
        """Navbar markup when this area is not selected."""
        return '<li><a href="/%s/">%s</a></li>' % (self.name, self.friendly_name)

    def __unicode__(self):
        return self.friendly_name
class Printer(models.Model):
    """A single print queue, polled via lpq by the updater."""
    # State constants used throughout the updater and views.
    OPERATIONAL = 0
    ERROR = 1
    states = (
        (OPERATIONAL, "Operational"),
        (ERROR, "Error")
    )
    name = models.CharField(max_length = 32, unique = True)
    friendly_name = models.CharField(max_length = 128)
    state = models.IntegerField(choices = states)
    # Human-readable reason shown when state == ERROR; blank otherwise.
    error_message = models.CharField(max_length = 64, null = True, blank = True)
    area = models.ForeignKey(Area)

    def __unicode__(self):
        return self.name
class Job(models.Model):
    """One entry in a printer's lpq output."""
    # lpq job id.
    job_number = models.IntegerField()
    # Position in the queue; 0 means actively printing.
    position = models.IntegerField()
    user_id = models.CharField(max_length = 9)
    name = models.CharField(max_length = 128)
    printer = models.ForeignKey(Printer)

    def __unicode__(self):
        return self.name
<file_sep>/ws/ps2/ps2.py
import logging
logging.basicConfig(level = logging.DEBUG)
from spyne.application import Application
from spyne.decorator import rpc
from spyne.service import ServiceBase
from spyne.model.primitive import Unicode
from spyne.model.complex import Iterable, ComplexModel
from spyne.protocol.http import HttpRpc
from spyne.protocol.json import JsonDocument
from spyne.server.wsgi import WsgiApplication
from spyne.model.fault import Fault
# In-memory stub data: maps an area name to the list of printer names it
# contains.  ListAreas/ListPrinters below serve straight from this dict.
printers = {
    'area': [ 'printer1', 'printer2', 'printer3' ],
}
class PrinterStatus( ComplexModel ):
    """Spyne complex type describing a printer's status (fields not yet declared)."""
    # Specification:
    #  status: one of "idle" (accepting jobs, queue empty),
    #          "working" (accepting jobs, queue has some jobs),
    #          "loaded" (accepting jobs, queue has many jobs),
    #          "error" (accepting jobs, but printer has encountered error),
    #          "offline" (not accepting jobs)
    #  message: if status is "error" or "offline", passes along the message from pstat; otherwise, is an empty string
    #  queue: a list of QueueJobs
    pass
class QueueJob( ComplexModel ):
    """Spyne complex type for one job in a printer's queue (fields not yet declared)."""
    # Specification:
    #  name: name of print job
    #  size: size of print job
    #  position: position in queue (where 0 is the job which is actively printing)
    pass
class PrinterStatusServer( ServiceBase ):
    """Spyne RPC service exposing printer-status queries over HTTP/JSON."""

    @rpc( Unicode, _returns = Unicode )
    def Ping( context, pingMessage ):
        """Health check: answer "Pong" to "Ping", raise a client fault otherwise."""
        if pingMessage == "Ping":
            return "Pong"
        else:
            # fixed: the original `else` was missing its colon (SyntaxError)
            raise Fault( faultcode = 'Client.MessageFault', faultstring = 'pingMessage was not "Ping"' )

    @rpc( Unicode, _returns = PrinterStatus )
    def Status( context, printerName ):
        """Return the PrinterStatus for printerName (not implemented yet)."""
        pass

    @rpc( Unicode, _returns = Iterable( Unicode ) )
    def ListPrinters( context, areaName ):
        """Yield the printer names registered for areaName; fault if the area is unknown."""
        areaPrinters = printers.get(areaName, False)
        if not areaPrinters:
            raise Fault( faultcode = 'Client.ArgumentFault', faultstring = "The given areaName was not found" )
        for printer in areaPrinters:
            yield printer

    @rpc( _returns = Iterable( Unicode ) )
    def ListAreas( context ):
        """Yield every known area name from the module-level `printers` dict."""
        for area in printers:
            yield area
# Wire the service into a Spyne application.  Fixed: the original passed
# the undefined name `HelloWorldService` (a copy/paste leftover); the
# service class defined above is PrinterStatusServer.
app = Application( [ PrinterStatusServer ], tns = 'edu.rpi.PrinterStatusServer', in_protocol = HttpRpc( validator = "soft" ), out_protocol = JsonDocument() )

if __name__ == "__main__":
    # Serve the application with the stdlib WSGI reference server on :8000.
    from wsgiref.simple_server import make_server
    application = WsgiApplication( app )
    server = make_server( '0.0.0.0', 8000, application )
    server.serve_forever()
<file_sep>/dev/printers/admin.py
from django.contrib import admin
from printers.models import Area, Printer, Job
# Register your models here.
# Expose the three printer-queue models in the Django admin site with the
# default ModelAdmin options.
admin.site.register(Area)
admin.site.register(Printer)
admin.site.register(Job)
<file_sep>/README.md
RPI Print Queue, Next Generation
| e58a9e8cae03e0270b6168767713aa5471adae6d | [
"Markdown",
"Python"
] | 7 | Python | TrevorNT/rpi-printqueue-ng | 92b4b23a3bf8bfc8188c8bf607644d5723ef379b | d74f5148744bcf28c8f5e36cb9f76e8bf53e82ca |
refs/heads/master | <repo_name>neo1218/algor<file_sep>/str_match/BF.py
# coding: utf-8
def BF(String, Pattern):
    """Brute-force substring search.

    String: the text to scan.
    Pattern: the pattern to look for.
    Returns the index of the first match of Pattern in String, or -1
    when there is no match.
    """
    text_len, pat_len = len(String), len(Pattern)
    i = j = 0
    # advance both cursors on a match; on a mismatch slide the pattern
    # one position to the right and restart it from 0
    while i < text_len and j < pat_len:
        if String[i] == Pattern[j]:
            i += 1
            j += 1
        else:
            i = i - j + 1
            j = 0
    return i - j if j == pat_len else -1
# test
if __name__ == "__main__":
    # Python 2 smoke tests; the expected match index is in the trailing comment
    print BF('itpattern', 'pattern') # 2
    print BF('itpattern', 'patternl') # -1
<file_sep>/tests/test3.cpp
/* data structure */
# include <vector>
# include <deque>
# include <list>
# include <set>
# include <map>
# include <unordered_set>
# include <unordered_map>
# include <iterator>
# include <algorithm>
# include <numeric>
# include <functional>
# include <iostream>
using namespace std;
int main()
{
    /* Notes (translated): data structures and databases are naturally
     * related -- both study stored relationships; a database simply holds
     * a much larger collection of data. */
    // Memory is no longer the scarcest resource compared with time.
    // Decide what to expose and what to hide (information hiding).
    // The core of a program is its algorithms: sorting, tokenising
    // (dictionaries, searching, matching).
    return 0;
}
<file_sep>/str_match/kmp.py
# coding: utf-8
"""
kmp.py
~~~~~~
python kmp算法实现, 无注释版
"""
def make_next(t, next):
    """Append the KMP failure function of pattern t into the list `next`.

    After the call, next[i] is the length of the longest proper prefix of
    t[:i+1] that is also a suffix of it.
    """
    next.append(0)
    k = 0
    for i in range(1, len(t)):
        # fall back through shorter borders until the next char extends one
        while k and t[k] != t[i]:
            k = next[k - 1]
        if t[i] == t[k]:
            k += 1
        next.append(k)
def kmp(s, t):
    """KMP search: return the start index of the first occurrence of
    pattern t in text s, or False when t does not occur.

    Fixes over the original:
    - scanned only range(m-n), missing the last alignment;
    - the `for i` loop reset i after the inner while had advanced it,
      losing matched progress and desynchronising i and j;
    - mismatch fallback used next[j] instead of next[j-1].
    The failure table is built locally so the function is self-contained.
    """
    m, n = len(s), len(t)
    if n == 0:
        return 0
    # failure table: fail[i] = longest proper border length of t[:i+1]
    fail = [0] * n
    k = 0
    for i in range(1, n):
        while k > 0 and t[i] != t[k]:
            k = fail[k - 1]
        if t[i] == t[k]:
            k += 1
        fail[i] = k
    j = 0
    for i in range(m):
        while j > 0 and s[i] != t[j]:
            j = fail[j - 1]
        if s[i] == t[j]:
            j += 1
        if j == n:
            return i - n + 1
    return False
if __name__ == "__main__":
print " test ----- kmp "
s = raw_input("target string: ")
t = raw_input("pattern string: ")
# s = 'hhhlhhhhhh'
# t = 'l'
print "%r in %r >> " % (t, s),
print kmp(s, t)
<file_sep>/notes/execs/QFU/QuickUnion.java
// QuickUnion: union-find backed by a parent-pointer forest.
// id[i] is the parent of node i; a node is a root when id[i] == i.
// Fixes over the original: arrays use `.length` (not `.length()`), and
// `root` is a method so it must be called as root(p), not indexed.
public class QuickUnion
{
    int id[];

    QuickUnion(int N)
    {
        // every element starts as its own root
        id = new int[N];
        for (int i = 0; i < id.length; i++)
        {
            id[i] = i;
        }
    }

    int root(int i)
    {
        // follow parent pointers up to the root; cost is bounded by the
        // depth of the tree containing i
        while (i != id[i])
        {
            i = id[i];
        }
        return i;
    }

    void union(int p, int q)
    {
        // merge the two trees by pointing p's root at q's root
        int rp = root(p);
        int rq = root(q);
        if (rp == rq)
        { return; }
        id[rp] = rq;
    }

    boolean connected(int p, int q)
    {
        // two nodes are connected iff they share a root
        return root(p) == root(q);
    }
}
<file_sep>/sorts/merge_sort.py
# coding: utf-8
# merge_sort.py: 归并排序
"""
基本思想: 分治法
==> 1. 划分
--> 1. 平衡子问题(子序列规模相当)
--> 2. 独立性(子序列的求解相对独立)
==> 2. 求解(递归)
--> 1. 边界条件(如果只有一个元素的情况)
--> 2. 递归模式(代码编写)
==> 3. 合并(将2个子序列合并为一个序列)
分治法的效率往往取决于合并这一步
"""
def merge_sort(lit, lit2, s, t):
    """Merge-sort lit[s..t] (inclusive), leaving the sorted result in lit2[s..t].

    lit may be modified (it is used as scratch space between levels).

    Fix over the original: the recursive calls left each sorted half in
    lit2, but the merge then read from the *unsorted* lit, so the result
    was never actually sorted.  The merge is inlined here so this
    function does not depend on the (separately buggy) module-level
    merge() helper.
    """
    if s == t:
        # single element: already sorted
        lit2[s] = lit[s]
        return
    m = (s + t) // 2
    merge_sort(lit, lit2, s, m)
    merge_sort(lit, lit2, m + 1, t)
    # the recursion produced sorted runs in lit2; copy them back into lit
    # so the merge below reads sorted input
    lit[s:t + 1] = lit2[s:t + 1]
    i, j, k = s, m + 1, s
    while i <= m and j <= t:
        if lit[i] <= lit[j]:
            lit2[k] = lit[i]
            i += 1
        else:
            lit2[k] = lit[j]
            j += 1
        k += 1
    while i <= m:
        lit2[k] = lit[i]
        i += 1
        k += 1
    while j <= t:
        lit2[k] = lit[j]
        j += 1
        k += 1
def merge(lit2, lit, s, m, t):
    """Merge the sorted runs lit[s..m] and lit[m+1..t] into lit2[s..t].

    Fixes over the original:
    - boundary tests used `<` where the ranges are inclusive (`<= m`,
      `<= t`), dropping the last element of each run;
    - both leftover-copy loops incremented j (and never i), so the
      first one never terminated when the left run had elements left.
    """
    i, j, k = s, m + 1, s
    while i <= m and j <= t:
        if lit[i] <= lit[j]:
            lit2[k] = lit[i]
            i += 1
        else:
            lit2[k] = lit[j]
            j += 1
        k += 1
    # copy whichever run still has elements
    while i <= m:
        lit2[k] = lit[i]
        i += 1
        k += 1
    while j <= t:
        lit2[k] = lit[j]
        j += 1
        k += 1
# main
# Interactive Python 2 driver: read a whitespace-separated integer list,
# sort it with merge_sort, and print the sorted copy held in lit2.
lit = []
user_input = raw_input("请输入待排序序列: ")
str_lit = user_input.split()
for i in str_lit:
    lit.append(int(i))
s = 0; t = len(lit)-1
lit2 = range(len(lit))
merge_sort(lit, lit2, s, t)
print lit2
<file_sep>/tests/test2.cpp
/* test2.cpp --> test more */
# include <vector>
# include <deque>
# include <list>
# include <set>
# include <map>
# include <unordered_set>
# include <unordered_map>
# include <iterator>
# include <algorithm>
# include <numeric>
# include <functional>
# include <iostream>
using namespace std;
int main()
{
    // placeholder: nothing implemented yet
    return 0;
}
<file_sep>/notes/lecture2.md
# 第二讲: 并查集(UFS)的算法实现
## DataStructure
+ Integer array id[] of size N
+ Interpretation: p & q are connected iff they have same id
id[]--> 0 1 2 3 4 5 6 7 8 9
id ---> 0 1 1 3 3 0 0 1 3 3
connected--> {0, 5, 6} {1, 2, 7} {3, 4, 8, 9}
### Find
now, we can check if {id[p] == id[q]} to test if connected(p, q)
### Union
merge: second id == first id
## Java
--> UF.java
<file_sep>/sorts/pao_sort.cpp
/* pao_sort.cpp: 冒泡排序 */
# include <vector>
# include <deque>
# include <list>
# include <set>
# include <map>
# include <unordered_set>
# include <unordered_map>
# include <iterator>
# include <algorithm>
# include <numeric>
# include <functional>
# include <iostream>
using namespace std;
/* Bubble sort driver: reads n then n integers, sorts, prints.
 * Fixes over the original:
 * - `for (item: lit)` is not valid C++ (missing a declared type/auto);
 * - `flag` was only tested once before the passes began, so it never
 *   produced the intended early exit; a per-pass `swapped` flag does. */
int main()
{
    vector<int> lit;
    int n(0), value(0);

    cout << "输入待排序的序列长度>> ";
    cin >> n;
    cout << "输入待排序的序列>> ";
    for (int i = 0; i < n; i++)
    {
        cin >> value;
        lit.push_back(value);
    }

    // bubble sort with early exit when a full pass performs no swap
    bool swapped = true;
    for (int i = 0; i < n && swapped; i++)
    {
        swapped = false;
        for (int j = 0; j + 1 < n - i; j++)
        {
            if (lit[j] > lit[j + 1])
            {
                swap(lit[j], lit[j + 1]);
                swapped = true;
            }
        }
    }

    for (auto item : lit) cout << item << " ";
    cout << endl;
    return 0;
}
<file_sep>/str_match/BF.md
# 字符串匹配算法~BF算法
## 问题描述
我们经常需要在一段字符串中查找某个子串。需要如何查找呢?
## 算法描述
BF算法就是解决字符串匹配问题的一个算法。<br/>
比如现在需要在文本串S(abcdba)中查找模式串P(dba) <br/>
首先把文本串和模式串首对齐, i和j分别标记文本串和模式串的当前匹配字符的位置(从0开始)
i
S a b c d b a
P d b a
j
第一次匹配
S[i] != P[j], 将模式串向右移**1位**
i
S a b c d b a
P d b a
j
此时, i移到文本串的(i-j+1)位置, j回到模式串的0位置
第二次匹配
S[i] != p[j], 将模式串向右移**1位**
i
S a b c d b a
P d b a
j
此时, i移到文本串的(i-j+1)位置, j回到模式串的0位置
第三次匹配
S[i] != p[j], 将模式串向右移**1位**
i
S a b c d b a
P d b a
j
此时, i移到文本串的(i-j+1)位置, j回到模式串的0位置
第四次匹配
S[i] == P[j], i++;j++
第五次匹配
S[i] == P[j], i++; j++
第六次匹配
s[i] == p[j], i++;j++
此时j已到达模式串末尾,匹配成功
匹配成功!
## 算法关键
从上面的案例可以看出, BF算法的两个特点
+ 1. 模式串和文本串都需要移动
+ 2. 每次匹配失效模式串只移动一位
## Python实现
def BF(String, Pattern):
"""
:String: 文本串
:Pattern: 模式串
返回模式串文本串中匹配的第一个位置,
若不匹配返回-1
"""
i = 0; j = 0 # 初始化游标
lenS = len(String)
lenP = len(Pattern)
while(j != lenP and i != lenS):
if (String[i] == Pattern[j]):
i += 1
j += 1
else:
i = (i-j+1)
j = 0
if (j == lenP):
return (i-j)
else:
return -1
## 复杂度分析
其实BF算法的两个特性都是不好的特性, 特性一: 文本串和模式串都要移动,
字符移动匹配的次数增多。<br/>
特性二就更坑爹了, 设想下面这种情况:
文本串S: abcabd
模式串P: abd
当c和d不匹配时, BF算法会将abd向右移动一位, 可是Sa(S中的a字符, 下同)==Pa!=Sb,
所以S中的第二个b和P中第一个a是一定不会相等的, 这样这次比较就没有任何意义。 <br/>
这两点坏特性的解决, 是后面KMP算法干的事情。 <br/>
BF算法时间复杂度: <br/>
设: M = len(S), N = len(P), BF算法最坏情况下需要进行```N*(M-N+1)```, 时间复杂度是O(M*N)
## 扩展阅读
无
<file_sep>/notes/lecture4.md
# 第四讲: Quick Union Improve
## Weighted quick union
small size root merge to big size root
<file_sep>/notes/lecture1.md
# 不相交集合数据结构 Dynamic Connectivity
DC --> Union-Find Set
## Union
Replace Components containing two objects with their union. {x1, x2}
## Connection
定义--> connection --> <br/>
+ Reflexive: 自反性
+ Symmetric: 对称性
+ Transitive: 传递性
## Friends in a social network
this is in reality, our social network, but in computer(program): 0 ~ N-1
## Java
N: the number of the objects <br/>
public class UF(int N) <br/>
void union(int p, int q) <br/>
void connected(int p, int q) <br/>
## Dynamic Connectivity Client
--> DClient.java
<file_sep>/notes/lecture3.md
# 第三讲: Quick Find 2 Quick Union
+ same datastructure: id array size of N
+ id[i] is the parent of i
+ change the line 2 tree
so, if 2 elements in same connective compoents, they should have same root!
<file_sep>/notes/execs/QuickUnionUF.java
// QuickUnionUF: weighted quick-union implementation of union-find.
// Fix over the original: `root` is a method and must be invoked as
// root(p) -- the original indexed it like an array (root[p]).
public class QuickUnionUF
{
    private int[] id;   // id[i] is the parent of i; roots satisfy id[i] == i
    private int[] sz;   // sz[r] = number of nodes in the tree rooted at r

    public QuickUnionUF(int N)
    {
        // constructor: every element starts as a singleton tree
        id = new int[N];
        for (int i = 0; i < N; i++)
            id[i] = i;
        sz = new int[N];
        for (int i = 0; i < N; i++)
            sz[i] = 1;
    }

    private int root(int i)
    {
        // follow parent pointers to the root
        while (i != id[i])
        {
            i = id[i];
        }
        return i;
    }

    public boolean connected(int p, int q) {
        return root(p) == root(q);
    }

    public void union(int p, int q) {
        int pr = root(p);
        int qr = root(q);
        if (pr == qr) {
            return;
        }
        // weighting: hang the smaller tree under the larger one to keep
        // tree depth logarithmic
        if (sz[pr] < sz[qr]) {
            id[pr] = qr;
            sz[qr] += sz[pr];
        } else {
            id[qr] = pr;
            sz[pr] += sz[qr];
        }
    }
}
<file_sep>/sorts/select_sort.py
# coding: utf-8
# select sort : 选择排序python实现
# 选择排序的基本思路就是: 将待排序的序列分成两部分(已排序, 未排序)
# 然后在未排序的序列中选择最小的放入已经排序的适当位置
# [1][2][3][][][][][][][][]
# 在选择区比较选择, 在排序区排序
def select_sort(lit):
    """Selection sort: sort lit in place (ascending) and return it.

    Each outer pass finds the minimum of the unsorted tail and swaps it
    into position i.
    """
    n = len(lit)
    for i in range(n):
        smallest = i
        for j in range(i + 1, n):
            if lit[j] < lit[smallest]:
                smallest = j
        # swap only when a strictly smaller element was found
        if smallest != i:
            lit[i], lit[smallest] = lit[smallest], lit[i]
    return lit
if __name__ == "__main__":
lit = []
user_input = raw_input(">> 输入待排序序列 ")
str_lit = user_input.split()
for str_i in str_lit:
lit.append(int(str_i))
print select_sort(lit)
<file_sep>/sorts/pao_sort.py
# coding: utf-8
# pao_sort: 冒泡排序
# 冒泡排序的关键是相邻值之间进行比较排序
# 冒泡排序提高效率的一个方法就是立flag
def simple_pao_sort(lit):
    """Plain bubble sort: n full passes of adjacent compare-and-swap.

    Sorts lit in place (ascending) and returns it.
    """
    n = len(lit)
    for _ in range(n):
        for j in range(n - 1):
            if lit[j] > lit[j + 1]:
                lit[j], lit[j + 1] = lit[j + 1], lit[j]
    return lit
def flag_pao_sort(lit):
    """Bubble sort with a real early-exit optimisation.

    Sorts lit in place (ascending) and returns it.

    Fix over the original: `flag` was tested only once, before any pass
    ran, so it never short-circuited anything.  Here the flag is reset
    per pass and the sort stops as soon as a pass performs no swap.
    """
    n = len(lit)
    for i in range(n):
        swapped = False
        # each pass bubbles the largest remaining element to the end,
        # so the last i positions are already in place
        for j in range(n - 1 - i):
            if lit[j] > lit[j + 1]:
                lit[j], lit[j + 1] = lit[j + 1], lit[j]
                swapped = True
        if not swapped:
            break
    return lit
# main
if __name__ == "__main__":
    # interactive Python 2 driver; runs both variants on the same list
    # (note: the first call already sorts lit in place)
    lit = []
    user_input = raw_input("待排序序列>> ")
    str_list = user_input.split()
    for i in str_list:
        lit.append(int(i))
    print simple_pao_sort(lit)
    print flag_pao_sort(lit)
<file_sep>/str_match/KMP.md
# 字符串匹配算法~KMP算法
## 问题描述
BF算法可以解决字符串匹配的问题,
通过[这篇文章](https://github.com/neo1218/algor/blob/master/str_match/BF.md)的分析, BF算法有两个缺点
+ 模式串和文本串都在移动
+ 每次匹配失效只向右移动一位
KMP算法改进了这两点, 每次匹配时文本串不移动, 模式串经计算后确定向右移动的位数
## 算法介绍
KMP算法的关键就是**求出模式串中每个字符匹配失效后下次移动到的位置**,
这些位置的集合用next数组表示。求解next数组需要理解下面的概念:<br/>
### 前缀&后缀
比如字符串 "simple"
字符串: simple
前缀: s, si, sim, simp, simpl
后缀: imple, mple, ple, le, e
### 部分匹配表
部分匹配表是对应模式串中每个字符**最长公共前缀后缀**的长度, 举个例子:
字符串: ABCDABD
部分匹配表:
-------------------------------------------------------------
元素 | A | B | C | D | A | B | D |
-------------------------------------------------------------
子串 |无 | A |AB |ABC |ABCD |ABCDA |ABCDAB |
-------------------------------------------------------------
前缀 |无 |无 | A |A,AB|A,AB,ABC|A,AB,ABC,ABCD|A,AB,ABC,ABCD,ABCDA|
-------------------------------------------------------------
后缀 |无 |无 | B |BC,C|BCD,CD,D|BCDA,CDA,DA,A|BCDAB,CDAB,DAB,AB,B|
-------------------------------------------------------------
值 |-1 | 0 | 0 | 0 | 0 | 1 | 2 |
-------------------------------------------------------------
(初始值置为-1)
## 结合部分匹配表确定模式串字符移动位数
用上面的部分匹配表举个例子:
文本串S: ABCABCDABD
模式串P: ABCDABD
i
S: ABCABCDABD
P: ABCDABD
j
[第一次匹配]
S[i] == P[j], i++, j++
i
S: ABCABCDABD
P: ABCDABD
j
[第二次匹配]
S[i] == P[j], i++, j++
i
S: ABCABCDABD
P: ABCDABD
j
[第三次匹配]
S[i] == P[j], i++, j++
i
S: ABCABCDABD
P: ABCDABD
j
[第四次匹配]
S[i] != P[j], 查表!
D对应的部分匹配表的值是:0
此时模式串向右移动(i-0=3)位(文本串并未移动)
i
S: ABCABCDABD
P: ABCDABD
[省略] 匹配成功!
## 算法实现
def make_next(t, next):
j = 0
n = len(t)
next.append(0)
for i in range(1, n):
while(j > 0 and (t[j] != t[i])):
j = next[j-1]
if (t[i] == t[j]):
j = j+1
next.append(j)
def kmp(s, t):
j = 0
m = len(s); n = len(t)
next = []
make_next(t, next)
for i in range(m-n+1):
while (s[i] == t[j]):
i += 1
j += 1
if (j == n - 1):
return True
if (j == n):
break
else:
j = next[j]
return False
## 扩展阅读
+ [阮一峰的博客:关于部分匹配表的解释](http://www.ruanyifeng.com/blog/2013/05/Knuth–Morris–Pratt_algorithm.html)
<file_sep>/package/README.md
# 在组合问题中使用蛮力法
**首先: 用蛮力法求解组合问题仅适用于解空间比较小的情况, 如果规模过大会出现
组合爆炸💥 现象!**
## 生成排列对象
应用蛮力法生成{1, 2...., n}的所有n!个排列:
<hr/>
[python实现]
def gen_arrangement(n):
""" 生成n的全排列 """
[C++实现]
I love writing code
coding & coding
<hr/>
## 生成子集
<file_sep>/notes/lecture5.md
# 第五讲: 并查集的应用
1. 物理: 渗透理论的研究(然而并听不懂, 但是感觉很有用的样子)
+ 流体通过某种有孔介质的连通性
2. 这一讲听的很轻松, 因为我知道很多东西我也听不明白, 不过确实体现了计算机的应用广泛
但是我其实只是想要关注于计算机和互联网领域 UI, 交互(on terminal)
terminal UI, 我想开发一个terminal的UI组建库(shell版, Python版)供这些开发者使用
我觉得这些对我来说是非常有意义的事情
1. auto reload static generator
2. a async web framework
算法就像是一把剑, 可能在这个时代你不需要剑, 但是剑绝对是有用的
算法就是这样的剑
瓶颈:
生活上:
学习上:
计划上:
我想成为什么?
<file_sep>/str_match/match.py
# coding: utf-8
# python: 关于字符串匹配的代码
import sys
# 朴素的模式匹配
def BF(s, t):
    """
    Naive (brute-force) substring search.
    s: target string
    t: pattern string
    Returns True iff t occurs in s.

    Fix over the original: the `else: return False` sat inside the loop,
    so only alignment 0 was ever tested -- any text not *starting* with
    the pattern was reported as a non-match.
    """
    m = len(s)
    n = len(t)
    for i in range(m - n + 1):
        if t == s[i:i + n]:
            return True
    return False
"""
算法效率分析
===>
此处算法执行的基本语句是判断(t == s[i:i+n])
也就是每次循环都需要进行n次判断, 共进行m-n+1次循环, 所以
算法效率 = O((m-n+1)*n) = O(mn-n*n+n) = O(mn)
"""
# =========================================================================== #
# KMP算法
def make_next(t, next):
    """
    Fill `next` (a pre-sized list of len(t) entries) with KMP failure
    values: next[j] = length of the longest proper prefix of t[:j+1]
    that is also a suffix of it.

    Fix over the original: the loop guard `while (j < t[0])` treated the
    pattern's first *character* as its length (a Pascal-style string
    convention that does not apply to Python strings), so the table was
    never built correctly.
    """
    if not t:
        return
    next[0] = 0
    k = 0
    for j in range(1, len(t)):
        # fall back through shorter borders until one extends
        while k > 0 and t[j] != t[k]:
            k = next[k - 1]
        if t[j] == t[k]:
            k += 1
        next[j] = k
def kmp(s, t):
    """
    KMP substring search.
    s: target string
    t: pattern string
    Returns True iff t occurs in s.

    Rewritten because the original desynchronised i and j (the outer
    loop bound `m - n` missed the final alignments, the inner while ran
    off the end of the pattern, and the mismatch fallback rebuilt the
    table on every miss).  The failure table is computed locally so this
    function does not depend on the module-level make_next helper.
    """
    m = len(s)
    n = len(t)
    if n == 0:
        return True
    if n > m:
        return False
    fail = [0] * n
    k = 0
    for idx in range(1, n):
        while k > 0 and t[idx] != t[k]:
            k = fail[k - 1]
        if t[idx] == t[k]:
            k += 1
        fail[idx] = k
    j = 0
    for i in range(m):
        while j > 0 and s[i] != t[j]:
            j = fail[j - 1]
        if s[i] == t[j]:
            j += 1
        if j == n:
            return True
    return False
"""
kmp算法是对BF(朴素模式匹配)的改进, 具体的改进点在于匹配失败后的移位处理:
不再是盲目的向右移动一位,而是基于匹配失败位之前子串的匹配信息进行移位, 这样
可以避免不必要的移位
kmp算法效率分析
===>
kmp算法的时间复杂度是O(n+m), 当模式串的长度远小于问题规模时时间复杂度是O(n)
"""
"""main: for test string match"""
if __name__ == "__main__":
s = "this is a long string to test kmp"
t = "exo" # hope to False
p = "his" # hope to True
if sys.argv[1] == 'kmp':
print "----test kmp----"
print "%s in %s: " % (t, s)
# print "---->", kmp(s, t)
print "%s in %s: " % (p, s)
print "---->", kmp(s, p)
elif sys.argv[1] == 'bf':
print "----test bf----"
print "%s in %s: " % (t, s)
print "---->", BF(s, t)
print "%s in %s: " % (p, s)
print "---->", BF(s, p)
<file_sep>/str_match/kmp.cpp
/* kmp: kmp算法C++实现 纯代码 无笔记注释版 */
# include <vector>
# include <deque>
# include <list>
# include <set>
# include <map>
# include <unordered_set>
# include <unordered_map>
# include <iterator>
# include <algorithm>
# include <numeric>
# include <functional>
# include <string>
# include <iostream>
using namespace std;
void make_next(string, vector<int>&);
bool kmp(string, string);

/* Build the KMP failure (partial-match) table for pattern t.
 * next is refilled so that next[i] is the length of the longest proper
 * prefix of t.substr(0, i+1) that is also its suffix.
 * Fix over the original: the vector was passed BY VALUE, so the filled
 * table was discarded on return; it is now an out-parameter reference,
 * and it is reset with assign() instead of appending to whatever the
 * caller passed in. */
void make_next(string t, vector<int>& next)
{
    int n = t.length();
    next.assign(n, 0);
    int k = 0;
    for (int i = 1; i < n; i++)
    {
        // fall back through shorter borders until the next char extends one
        while (k > 0 && t[i] != t[k])
            k = next[k - 1];
        if (t[i] == t[k])
            k++;
        next[i] = k;
    }
}
/* KMP search: return true iff pattern t occurs in text s.
 * Rewritten: the original advanced i inside both the for and the while,
 * fell back with next[j] instead of next[j-1], and rebuilt the table on
 * every mismatch.  The failure table is computed locally so this
 * function stands alone. */
bool kmp(string s, string t)
{
    int m = s.length();
    int n = t.length();
    if (n == 0) return true;
    if (n > m) return false;
    vector<int> next(n, 0);
    int k = 0;
    for (int i = 1; i < n; i++)
    {
        while (k > 0 && t[i] != t[k])
            k = next[k - 1];
        if (t[i] == t[k])
            k++;
        next[i] = k;
    }
    int j = 0;
    for (int i = 0; i < m; i++)
    {
        while (j > 0 && s[i] != t[j])
            j = next[j - 1];
        if (s[i] == t[j])
            j++;
        if (j == n)
            return true;
    }
    return false;
}
int main()
{
    using namespace std;
    /* main for test */
    // Interactive driver: read the target and pattern lines, print kmp().
    string s, t;
    cout << " <--- test kmp(c++) ---> \n";
    cout << "目标串: ";
    getline(cin, s);
    cout << "\n模式串: ";
    getline(cin, t);
    cout << t << " in " << s << "-->" << kmp(s, t) << "\n";
    return 0;
}
/* 现在还有bug, 关于输入的空格问题, 也就是实现多行输入 */
/* 关于make_next: make_next 中的next数组实际上就是最大匹配表 */
using namespace std;
/* make_next2: alternative statement of the failure-table builder kept
 * alongside the notes.  next is refilled in place; next[i] is the
 * maximal-match (border) length for t.substr(0, i+1).
 * Fixes over the original: a missing semicolon made the file fail to
 * compile, the loop compared against t[0] instead of t[j], and the
 * output vector was passed by value so nothing was ever returned. */
void make_next2(string t, vector<int>& next)
{
    int n = t.length();
    next.assign(n, 0);
    int j = 0;
    for (int i = 1; i < n; i++)
    {
        while (j > 0 && t[i] != t[j])
        {
            j = next[j - 1];
        }
        if (t[i] == t[j])
        {
            j++;
        }
        next[i] = j;
    }
}
<file_sep>/README.md
# algor-code
knowledge @ online
1. **算法分析**
+ [非递归算法与递归算法分析](http://neo1218.github.io/2016/04/27/algor/)
2. **字符串匹配算法**
+ [BF算法](https://github.com/neo1218/algor/blob/master/str_match/BF.md)
+ [KMP算法](https://github.com/neo1218/algor/blob/master/str_match/KMP.md)
+ [BM算法]()
+ [Sunday算法]()
3. **排序算法**
+ 选择排序
+ [python实现](https://github.com/neo1218/algor/blob/master/sorts%2Fselect_sort.py)
+ [C++实现](https://github.com/neo1218/algor/blob/master/sorts%2Fselect_sort.cpp)
+ 冒泡排序
+ [python实现](https://github.com/neo1218/algor/blob/master/sorts%2Fpao_sort.py)
+ [C++实现](https://github.com/neo1218/algor/blob/master/sorts%2Fpao_sort.cpp)
+ 归并排序
+ [python实现](https://github.com/neo1218/algor/blob/master/sorts%2Fmerge_sort.py)
+ [C++实现]()
+ 快速排序
+ [python实现]()
+ [C++实现]()
4. **组合问题**
+ ***蛮力法***
+ [组合问题中的蛮力法分析]()
+ [0/1背包问题蛮力法求解]()
+ [任务分配问题蛮力法求解]()
5. **图问题**
+ ***蛮力法***
+ [哈密顿回路问题]()
+ [TSP问题]()
6. **分治法**
+ ***递归***
+ [汉洛塔问题]()
<file_sep>/hanoi/hanoi.py
# coding: utf-8
# hanoi 问题
# 递归思想的应用
def MOVE(A, B):
    # print a single disc move (Python 2 print statement): source -> target
    print "%s ----> %s" % (A, B)
def Hanoi(A, B, C, n):
    """Print the moves that transfer n discs from peg A to peg C, using B as the spare."""
    if n == 1:
        # base case: a single disc moves directly
        MOVE(A, C)
        return
    Hanoi(A, C, B, n - 1)   # park n-1 discs on the spare peg
    MOVE(A, C)              # move the largest disc
    Hanoi(B, A, C, n - 1)   # bring the n-1 discs onto it
# main
# Python 2 driver: read the disc count from stdin and print the move sequence.
n = input()
Hanoi("A", "B", "C", n)
<file_sep>/notes/execs/UF/TestUnionFind.java
// Test driver for the UnionFind implementation; exercising UnionFind
// here doubles as a usage example.
public class TestUnionFind
{
    // Fix: main must be `static` so the JVM can invoke it as the entry
    // point without constructing a TestUnionFind instance.
    public static void main(String args[])
    {
        // main test entry point
        UnionFind uf = new UnionFind(10);
    }
}
<file_sep>/package/package.py
# coding: utf-8
# package.py
# 0/1 背包问题~蛮力法求解
<file_sep>/sorts/select_sort.cpp
/* select_sort.cpp 选择排序*/
# include <vector>
# include <deque>
# include <list>
# include <set>
# include <map>
# include <unordered_set>
# include <unordered_map>
# include <iterator>
# include <algorithm>
# include <numeric>
# include <functional>
# include <iostream>
using namespace std;
/* Selection-sort driver: reads n then n integers, sorts, prints.
 * Fixes over the original:
 * - the swap sat INSIDE the inner scan, exchanging elements before the
 *   true minimum of the tail was known; it belongs after the scan;
 * - `for (i:lit)` is not valid C++ (range-for needs a declaration). */
int main()
{
    int n(0), value(0);
    vector<int> lit;

    cout << ">> 输入序列的长度 ";
    cin >> n;
    cout << ">> 输入待排序的序列 ";
    for (int i = 0; i < n; i++)
    {
        cin >> value;
        lit.push_back(value);
    }

    for (int i = 0; i < n; i++)
    {
        // find the index of the minimum of the unsorted tail
        int index = i;
        for (int j = i + 1; j < n; j++)
        {
            if (lit[j] < lit[index])
                index = j;
        }
        // one swap per outer pass, once the minimum is known
        if (index != i)
            swap(lit[i], lit[index]);
    }

    for (auto v : lit) cout << v << " ";
    cout << endl;
    return 0;
}
<file_sep>/tests/kmp.py
# coding: utf-8
"""
kmp.py
~~~~~~
kmp算法Python实现
"""
def kmp(String, Pattern):
    """
    String: text to search.
    Pattern: pattern to find.
    Returns the index of the first occurrence of Pattern in String,
    otherwise -1.

    Fix over the original: after a mismatch at j == 0 the cursor became
    pnext[0] == -1 and the next comparison read Pattern[-1] (the *last*
    character) instead of advancing the text -- the standard guard
    `j == -1 or chars match` handles the sentinel.  The next table is
    built locally so this function does not depend on the module-level
    make_next helper.
    """
    slen = len(String)
    plen = len(Pattern)
    if plen == 0:
        return 0
    # next table with the -1 sentinel convention (pnext[0] == -1)
    pnext = [0] * plen
    pnext[0] = -1
    k = -1
    j = 0
    while j < plen - 1:
        if k == -1 or Pattern[j] == Pattern[k]:
            k += 1
            j += 1
            pnext[j] = k
        else:
            k = pnext[k]
    i = 0
    j = 0
    while i != slen and j != plen:
        if j == -1 or String[i] == Pattern[j]:
            i += 1
            j += 1
        else:
            j = pnext[j]
    return i - j if j == plen else -1
def make_next(Pattern):
    """
    Return the KMP next table for Pattern using the -1 sentinel
    convention: pnext[0] == -1 and, for j > 0, pnext[j] is the fallback
    position for the pattern cursor after a mismatch at j.

    Fix over the original: the advance guard used `k == -1 AND chars
    match`, which can never both hold (Pattern[-1] is the last char) --
    the correct guard is `k == -1 OR chars match`, otherwise the loop
    falls back forever and the table is never filled.
    """
    plen = len(Pattern)
    if plen == 0:
        # guard: the original indexed pnext[0] on an empty pattern
        return []
    pnext = [0] * plen
    pnext[0] = -1
    k = -1
    j = 0
    while j < plen - 1:
        if k == -1 or Pattern[j] == Pattern[k]:
            k += 1
            j += 1
            pnext[j] = k
        else:
            k = pnext[k]
    return pnext
if __name__ == "__main__":
print "test1"
String = "ABCDAB"
Pattern = "ABD"
print kmp(String, Pattern)
print "test2"
String = "ABCDAB"
Pattern = "DAB"
print kmp(String, Pattern)
<file_sep>/notes/lecture6.md
# 第六讲: 算法分析介绍
这一讲主要是介绍性的, 可以用科学和数学的研究方法[研究算法]
<file_sep>/notes/execs/UF/UnionFind.java
// Quick-find implementation of a union-find (disjoint set) structure:
// id[i] is the component label of element i; connected(p, q) is a
// single array comparison.
// Fixes over the original: the constructor never initialised id (Java
// zero-fills arrays, so everything started "connected"), Union changed
// only id[p] instead of relabelling p's whole component, and the return
// in connected was missing its semicolon.
public class UnionFind
{
    private int id[];

    UnionFind(int N)
    {
        // every element starts in its own component
        id = new int[N];
        for (int i = 0; i < N; i++)
        {
            id[i] = i;
        }
    }

    void Union(int p, int q)
    {
        // merge p's component into q's by relabelling every member
        int pid = id[p];
        int qid = id[q];
        if (pid == qid)
        { return; }
        for (int i = 0; i < id.length; i++)
        {
            if (id[i] == pid)
            {
                id[i] = qid;
            }
        }
    }

    boolean connected(int p, int q)
    {
        // same label <=> same component
        return id[p] == id[q];
    }
}
| f572df4e278915e432d9cae7267dd2b9a8599056 | [
"Markdown",
"Java",
"Python",
"C++"
] | 28 | Python | neo1218/algor | 43251553295a6f7b3a9158b66a80996d2f6c534c | 0434d9a13b1ae643eecefb7ec19b2d7af5a6aa9d |
refs/heads/master | <repo_name>briansunter/bs-react-transition-group<file_sep>/example/src/Component1.bs.js
// Generated by BUCKLESCRIPT VERSION 4.0.5, PLEASE EDIT WITH CARE
'use strict';
var Block = require("bs-platform/lib/js/block.js");
var Curry = require("bs-platform/lib/js/curry.js");
var React = require("react");
var Transition = require("bs-react-transition-group/src/Transition.js");
var ReasonReact = require("reason-react/src/ReasonReact.js");
var component = ReasonReact.reducerComponent("Example");
// Build the ReasonReact component record for Example.  This file is
// BuckleScript output: the numeric array slots are the compiled record
// fields named by the /* ... */ markers -- edit the Reason source, not
// this file, for behavioural changes.
function make() {
  return /* record */[
          /* debugName */component[/* debugName */0],
          /* reactClassInternal */component[/* reactClassInternal */1],
          /* handedOffState */component[/* handedOffState */2],
          /* willReceiveProps */component[/* willReceiveProps */3],
          /* didMount */component[/* didMount */4],
          /* didUpdate */component[/* didUpdate */5],
          /* willUnmount */component[/* willUnmount */6],
          /* willUpdate */component[/* willUpdate */7],
          /* shouldUpdate */component[/* shouldUpdate */8],
          // render: a Transition driven by state.start plus start/end buttons
          /* render */(function (self) {
              return React.createElement("div", undefined, ReasonReact.element(undefined, undefined, Transition.make(self[/* state */1][/* start */0], 600, undefined, undefined, (function (animationState) {
                                // tags 0/1 are Entering/Entered; other states fall
                                // through to Transition.stringOfState
                                if (animationState !== 1) {
                                  if (animationState !== 0) {
                                    return "State: " + Transition.stringOfState(animationState);
                                  } else {
                                    return "State: Entering";
                                  }
                                } else {
                                  return "State: Entered";
                                }
                              }))), React.createElement("button", {
                          disabled: self[/* state */1][/* start */0],
                          onClick: (function () {
                              return Curry._1(self[/* send */3], /* Start */0);
                            })
                        }, "Start Animation"), React.createElement("button", {
                          disabled: !self[/* state */1][/* start */0],
                          onClick: (function () {
                              return Curry._1(self[/* send */3], /* End */1);
                            })
                        }, "End Animation"));
            }),
          /* initialState */(function () {
              return /* record */[/* start */false];
            }),
          /* retainedProps */component[/* retainedProps */11],
          // reducer: Start (tag 0) sets start=true, End sets start=false
          /* reducer */(function (action, _) {
              if (action) {
                return /* Update */Block.__(0, [/* record */[/* start */false]]);
              } else {
                return /* Update */Block.__(0, [/* record */[/* start */true]]);
              }
            }),
          /* jsElementWrapped */component[/* jsElementWrapped */13]
        ];
}
exports.component = component;
exports.make = make;
/* component Not a pure module */
<file_sep>/src/CSSTransition.js
// Generated by BUCKLESCRIPT VERSION 4.0.5, PLEASE EDIT WITH CARE
'use strict';
var ReasonReact = require("reason-react/src/ReasonReact.js");
var ReactTransitionGroup = require("react-transition-group");
// Reason binding for react-transition-group's CSSTransition.  The
// $staropt$star parameters are BuckleScript's compiled optional
// arguments; each is replaced by its default when undefined.
// Generated code -- edit the Reason source for behavioural changes.
function make($staropt$star, timeout, $staropt$star$1, $staropt$star$2, $staropt$star$3, $staropt$star$4, $staropt$star$5, $staropt$star$6, classNames, children) {
  var in_ = $staropt$star !== undefined ? $staropt$star : false;
  var className = $staropt$star$1 !== undefined ? $staropt$star$1 : "";
  var mountOnEnter = $staropt$star$2 !== undefined ? $staropt$star$2 : false;
  var unmountOnExit = $staropt$star$3 !== undefined ? $staropt$star$3 : false;
  var appear = $staropt$star$4 !== undefined ? $staropt$star$4 : false;
  var enter = $staropt$star$5 !== undefined ? $staropt$star$5 : true;
  var exit = $staropt$star$6 !== undefined ? $staropt$star$6 : true;
  return ReasonReact.wrapJsForReason(ReactTransitionGroup.CSSTransition, {
              in: in_,
              timeout: timeout,
              classNames: classNames,
              className: className,
              mountOnEnter: mountOnEnter,
              unmountOnExit: unmountOnExit,
              appear: appear,
              enter: enter,
              exit: exit
            }, children);
}
exports.make = make;
/* ReasonReact Not a pure module */
<file_sep>/lib/js/src/TransitionGroup.js
// Generated by BUCKLESCRIPT VERSION 4.0.5, PLEASE EDIT WITH CARE
'use strict';
var ReasonReact = require("reason-react/lib/js/src/ReasonReact.js");
var ReactTransitionGroup = require("react-transition-group");
// Reason binding for react-transition-group's TransitionGroup; the two
// $staropt$star parameters are compiled optional args (className
// defaults to "", component to "div").  Generated code.
function make($staropt$star, $staropt$star$1, children) {
  var className = $staropt$star !== undefined ? $staropt$star : "";
  var component = $staropt$star$1 !== undefined ? $staropt$star$1 : "div";
  return ReasonReact.wrapJsForReason(ReactTransitionGroup.TransitionGroup, {
              component: component,
              className: className
            }, children);
}
exports.make = make;
/* ReasonReact Not a pure module */
<file_sep>/README.md
# bs-react-transition-group
## Usage
```
type state = {start: bool};
type action =
| Start
| End;
let component = ReasonReact.reducerComponent("Example");
let make = (_children) => {
...component,
initialState: () => {start: false},
reducer: (action, state) =>
switch (action) {
| Start => ReasonReact.Update({...state, start: true})
| End => ReasonReact.Update({...state, start:false})
},
render: self =>
<div>
<Transition timeout=600 in_=self.state.start>
...((animationState) =>
switch(animationState) {
| Transition.Entering => (ReasonReact.string("State: " ++ "Entering"))
| Transition.Entered => (ReasonReact.string("State: " ++ "Entered"))
| _ => (ReasonReact.string("State: " ++ Transition.stringOfState(animationState)))
}
)
</Transition>
<button onClick=(_ => self.send(Start)) disabled={self.state.start}>
(ReasonReact.string("Start Animation"))
</button>
<button onClick=(_ => self.send(End)) disabled= {! self.state.start}>
(ReasonReact.string("End Animation"))
</button>
</div>
};
``` <file_sep>/src/Transition.js
// Generated by BUCKLESCRIPT VERSION 4.0.5, PLEASE EDIT WITH CARE
'use strict';
var Curry = require("bs-platform/lib/js/curry.js");
var ReasonReact = require("reason-react/src/ReasonReact.js");
var ReactTransitionGroup = require("react-transition-group");
// Map a react-transition-group status string onto the compiled Reason
// variant tag (0..3); any unrecognised status is NotTransitioning (4).
function stateOfString(str) {
  var table = {
    entering: /* Entering */0,
    entered: /* Entered */1,
    exiting: /* Exiting */2,
    exited: /* Exited */3
  };
  if (Object.prototype.hasOwnProperty.call(table, str)) {
    return table[str];
  }
  return /* NotTransitioning */4;
}
// Inverse of stateOfString for tags 0..3; tag 4 deliberately maps to
// the literal "Not" that the generated code emits (not
// "NotTransitioning").  Any other value yields undefined, as before.
function stringOfState(str) {
  var names = [
    "entering",
    "entered",
    "exiting",
    "exited",
    "Not"
  ];
  return names[str];
}
// Reason binding for react-transition-group's Transition.  The children
// callback receives the status string from the JS component; it is
// converted to the Reason variant tag via stateOfString before being
// passed through Curry._1.  Generated code.
function make($staropt$star, timeout, $staropt$star$1, $staropt$star$2, children) {
  var in_ = $staropt$star !== undefined ? $staropt$star : false;
  var component = $staropt$star$1 !== undefined ? $staropt$star$1 : "div";
  var className = $staropt$star$2 !== undefined ? $staropt$star$2 : "";
  return ReasonReact.wrapJsForReason(ReactTransitionGroup.Transition, {
              in: in_,
              component: component,
              timeout: timeout,
              className: className
            }, (function (strState, _) {
                return Curry._1(children, stateOfString(strState));
              }));
}
exports.stateOfString = stateOfString;
exports.stringOfState = stringOfState;
exports.make = make;
/* ReasonReact Not a pure module */
| bd141546441770dffc162fc908ef63cbb5432a6a | [
"JavaScript",
"Markdown"
] | 5 | JavaScript | briansunter/bs-react-transition-group | 8a4f1dad7a2238874ba3dca4cd96eaac65e85a8a | 8488bb5b11c0b22c338c67080e0bb5cd01b18fc2 |
refs/heads/master | <file_sep>import psycopg2
import ast
import numpy as np
import simplejson
import urllib
import json
import re
from helpers import *
import os
# Load API keys and the DB connection string at import time.
# NOTE(review): the path is resolved against the *current working
# directory*, not the module's location -- running the app from another
# directory will raise IOError; confirm this is intended.
current_path= os.getcwd()
with open(current_path + '/api_key_list.config') as key_file:
    api_key_list = json.load(key_file)
api_key = api_key_list["distance_api_key_list"]
conn_str = api_key_list["conn_str"]
def convert_event_ids_to_lst(event_ids):
    # Best-effort parser: `event_ids` arrives from the DB as a string in
    # several historical formats ("[1, 2]", "[1 2]", "[1.0 2.0]", ...)
    # and is normalised to a list of ints (Python 2 map returns a list).
    try:
        # preferred path: the string is a literal Python list
        if type(ast.literal_eval(event_ids)) == list:
            new_event_ids = map(int,ast.literal_eval(event_ids))
        else:
            # whitespace-separated variant: collapse runs of whitespace to
            # commas, drop stray dots, strip brackets, then split
            event_ids = re.sub("\s+", ",", event_ids.strip())
            event_ids = event_ids.replace('.','')
            new_event_ids = map(int,event_ids.strip('[').strip(']').strip(',').split(','))
    # NOTE(review): bare except silently falls through to the float-parsing
    # path on ANY error; consider narrowing to (ValueError, SyntaxError)
    except:
        # fallback: space-separated floats inside brackets, e.g. "[1.0 2.0]"
        event_ids = re.sub("\s+", " ", event_ids.replace('[','').replace(']','').strip()).split(' ')
        new_event_ids = map(int,map(float,event_ids))
    return new_event_ids
def add_search_event(poi_name, trip_location_id):
    '''
    input: name from poi_detail_table; trip_location_id from day_trip_table
    output: 7 items of items.
    '''
    # Returns (poi_dict, poi_names): up to 7 candidate POIs in the same
    # county/state as the trip that are not already in the trip, ranked by
    # trigram name similarity to poi_name and topped up by review count.
    # NOTE(security): every query below interpolates values straight into
    # SQL -- a poi_name containing a quote breaks the query (SQL
    # injection); these should use psycopg2 parameter binding.
    conn = psycopg2.connect(conn_str)
    cur = conn.cursor()
    cur.execute("SELECT county, state, event_ids FROM day_trip_table WHERE trip_locations_id = '%s' LIMIT 1;" %(trip_location_id))
    county, state, event_ids = cur.fetchone()
    event_ids = convert_event_ids_to_lst(event_ids)
    new_event_ids = tuple(event_ids)
    # pg_trgm similarity search (`name % '...'` / similarity()) over POIs
    # not already part of the trip
    cur.execute("SELECT index, name FROM poi_detail_table WHERE index NOT IN {0} AND county='{1}' AND state='{2}' and name % '{3}' ORDER BY similarity(name, '{3}') DESC LIMIT 7;".format(new_event_ids, county.upper(),state.title(), poi_name))
    # print "SELECT index, name FROM poi_detail_table WHERE index NOT IN {0} AND county='{1}' AND state='{2}' and name % '{3}' ORDER BY similarity(name, '{3}') DESC LIMIT 7;".format(new_event_ids, county,state, poi_name)
    results = cur.fetchall()
    poi_ids, poi_lst = [int(row[0]) for row in results], [row[1] for row in results]
    # poi_ids = convert_event_ids_to_lst(poi_ids)
    print 'add search result: ', poi_ids, poi_lst
    if 7-len(poi_lst)>0:
        # fewer than 7 similarity hits: pad with the most-reviewed POIs in
        # the same county/state, excluding trip events and the hits above
        event_ids.extend(poi_ids)
        event_ids = str(tuple(event_ids))
        cur.execute("SELECT index, name FROM poi_detail_table WHERE index NOT IN {0} AND county='{1}' AND state='{2}' ORDER BY num_reviews DESC LIMIT {3};".format(event_ids, county.upper(), state.title(), 7-len(poi_lst)))
        results.extend(cur.fetchall())
    poi_dict = {d[1]:d[0] for d in results}
    poi_names = [d[1] for d in results]
    conn.close()
    return poi_dict, poi_names
def outside_add_search_event(poi_name, outside_route_id):
    # Fetch the event-id list stored for an outside route and return it
    # both as a tuple and as the parsed list.
    # NOTE(review): poi_name is currently unused, the connection is never
    # closed, and the route id is interpolated into SQL unescaped --
    # presumably this function is unfinished; confirm before relying on it.
    conn = psycopg2.connect(conn_str)
    cur = conn.cursor()
    cur.execute("SELECT origin_city, origin_state, event_ids FROM outside_route_table WHERE outside_route_id = '%s' LIMIT 1;" %(outside_route_id))
    city, state, event_ids = cur.fetchone()
    # event_ids is stored as a JSON array string here (unlike
    # day_trip_table, which needs convert_event_ids_to_lst)
    event_ids = json.loads(event_ids)
    new_event_ids = tuple(event_ids)
    print new_event_ids, type(new_event_ids), event_ids, type(event_ids)
    return new_event_ids, event_ids
def add_event_day_trip(poi_id, poi_name, trip_locations_id, full_trip_id, full_day = True, unseen_event = False, username_id=1):
    """Add one POI to an existing day trip and persist the resulting day-trip row.

    ``poi_id`` is the poi_detail_table index, or falsy when the event is a
    free-text name not present in the POI table.  Returns a
    (old_trip_locations_id, new_trip_location_id, details) triple.
    NOTE(review): on the name-only insert path ``details`` is the SQL-escaped
    string form, while on the other paths it is a list of dicts -- the return
    type differs between branches; confirm callers handle both.
    """
    #day number is sth to remind! need to create better details maybe
    conn = psycopg2.connect(conn_str)
    cur = conn.cursor()
    username_id = 1  # hard-coded: per-user trips not wired up yet
    # Load the current day trip row and the enclosing full trip row.
    cur.execute("select full_day, event_ids, details from day_trip_table where trip_locations_id='%s'" %(trip_locations_id))
    (full_day, event_ids, day_details) = cur.fetchone()
    cur.execute("select trip_location_ids, details, county, state, n_days from full_trip_table where full_trip_id='%s'" %(full_trip_id))
    (trip_location_ids, full_trip_details, county, state, n_days) = cur.fetchone()
    event_ids = convert_event_ids_to_lst(event_ids)
    # NOTE(review): eval() on DB-stored text -- trusted data only; ast.literal_eval
    # is used elsewhere in this module for the same purpose.
    day_details = list(eval(day_details))
    if not poi_id:
        # Name-only event: the new id is the old id plus the slugified name.
        print 'type event_ids', type(event_ids), type(poi_name),str(poi_name).replace(' ','-').replace("'",''), '-'.join(map(str,event_ids))
        new_trip_location_id = '-'.join(map(str,event_ids))+'-'+str(poi_name).replace(' ','-').replace("'",'')
        cur.execute("select details from day_trip_table where trip_locations_id='%s'" %(new_trip_location_id))
        a = cur.fetchone()
        if bool(a):
            # Row already exists: return its stored details unchanged.
            conn.close()
            details = ast.literal_eval(a[0])
            return trip_locations_id, new_trip_location_id, details
        else:
            cur.execute("select max(index) from day_trip_table;")
            new_index = cur.fetchone()[0]+1
            #need to make sure the type is correct for detail!
            # Append a placeholder detail dict on the same day as the last event.
            day = day_details[-1]['day']
            new_event_detail = {"name": poi_name, "day": day, "coord_lat": "None", "coord_long": "None","address": "None", "id": "None", "city": "", "state": ""}
            # Normalize stringified dict entries back to dicts before appending.
            for index, detail in enumerate(day_details):
                if type(detail) == str:
                    day_details[index] = ast.literal_eval(detail)
            day_details.append(new_event_detail)
            #get the right format of detail: change from list to string and remove brackets and convert quote type
            day_detail = str(day_details).replace("'","''")
            event_ids.append(poi_name)
            event_ids = str(event_ids).replace("'","''")
            cur.execute("INSERT INTO day_trip_table VALUES (%i, '%s',%s,%s,'%s','%s','%s','%s','%s');" %(new_index, new_trip_location_id, full_day, False, county, state, day_detail,'add',event_ids))
            conn.commit()
            conn.close()
            return trip_locations_id, new_trip_location_id, day_detail
    else:
        # Numeric POI id path.
        if trip_locations_id.isupper() or trip_locations_id.islower():
            # Alphabetical (name-based) trip id: just append the new POI id.
            new_trip_location_id = '-'.join(map(str,event_ids))+'-'+str(poi_id)
        else:
            # db_event_cloest_distance(trip_locations_id=None,event_ids=None, event_type = 'add',new_event_id = None, city_name =None)
            # Re-order the events by distance with the new POI inserted.
            print 'add: ', trip_locations_id, poi_id
            event_ids, event_type = db_event_cloest_distance(trip_locations_id=trip_locations_id, new_event_id=poi_id)
            event_ids, driving_time_list, walking_time_list = db_google_driving_walking_time(event_ids,event_type = 'add')
            new_trip_location_id = '-'.join(map(str,event_ids))
            event_ids = map(int,list(event_ids))
        cur.execute("select details from day_trip_table where trip_locations_id='%s'" %(new_trip_location_id))
        a = cur.fetchone()
        if not a:
            # No cached row: build the detail dicts from poi_detail_table.
            details = []
            if type(day_details[0]) == dict:
                event_day = day_details[0]['day']
            else:
                event_day = ast.literal_eval(day_details[0])['day']
            for item in event_ids:
                cur.execute("select index, name, address, coord_lat, coord_long, city, state, icon_url, check_full_address, poi_type, adjusted_visit_length, img_url from poi_detail_table where index = '%s';" %(item))
                a = cur.fetchone()
                detail = {'id': a[0],'name': a[1],'address': a[2], 'day': event_day, 'coord_lat': a[3], 'coord_long': a[4], 'city': a[5], 'state': a[6], 'icon_url': a[7], 'check_full_address': a[8], 'poi_type': a[9], 'adjusted_visit_length': a[10], 'img_url': a[11]}
                details.append(detail)
            #need to make sure event detail can append to table!
            cur.execute("select max(index) from day_trip_table;")
            new_index = cur.fetchone()[0] +1
            event_type = 'add'
            cur.execute("insert into day_trip_table (index, trip_locations_id,full_day, regular, county, state, details, event_type, event_ids) VALUES (%s, '%s', %s, %s, '%s', '%s', '%s', '%s', '%s')" %(new_index, new_trip_location_id, full_day, False, county, state, str(details).replace("'",'"'), event_type, str(event_ids)))
            conn.commit()
            conn.close()
            return trip_locations_id, new_trip_location_id, details
        else:
            # Cached row exists: return its details, parsing if stored as text.
            conn.close()
            #need to make sure type is correct.
            if type(a[0]) == str:
                return trip_locations_id, new_trip_location_id, ast.literal_eval(a[0])
            else:
                return trip_locations_id, new_trip_location_id, a[0]
def add_event_full_trip(old_full_trip_id, old_trip_location_id, new_trip_location_id, new_day_details, username_id=1):
    """Rebuild a full trip after one of its day trips was replaced.

    Swaps ``old_trip_location_id`` for ``new_trip_location_id`` inside the
    full trip, re-derives the combined details from the per-day rows, and
    inserts the new full-trip row unless it already exists.  Returns
    (new_full_trip_id, trip_location_ids, new_details).
    ``new_day_details`` is accepted for interface compatibility but unused.
    """
    conn = psycopg2.connect(conn_str)
    cur = conn.cursor()
    username_id = 1  # hard-coded: per-user trips not wired up yet
    cur.execute("select full_day, event_ids, details from day_trip_table where trip_locations_id='%s'" %(new_trip_location_id))
    (full_day, event_ids, day_details) = cur.fetchone()
    cur.execute("select trip_location_ids, county, state, n_days from full_trip_table where full_trip_id='%s'" %(old_full_trip_id))
    (trip_location_ids, county, state, n_days) = cur.fetchone()
    trip_location_ids = ast.literal_eval(trip_location_ids)
    # Replace the first occurrence of the old day-trip id with the new one.
    for i, v in enumerate(trip_location_ids):
        if v == old_trip_location_id:
            idx = i
            trip_location_ids[i] = new_trip_location_id
            break
    new_full_trip_id = '-'.join(trip_location_ids)
    # cur.execute("DELETE FROM full_trip_table WHERE full_trip_id = '%s';" %(new_full_trip_id))
    # conn.commit()
    if not check_full_trip_id(new_full_trip_id):
        # New combination: gather details from each day trip row.
        new_details = []
        for trip_location_id in trip_location_ids:
            cur.execute("select details from day_trip_table where trip_locations_id='%s'" %(trip_location_id))
            details = cur.fetchone()[0]
            details = ast.literal_eval(details)
            for detail in details:
                # entries may be stored either as dicts or as their str() form
                if type(detail) == str:
                    detail = ast.literal_eval(detail)
                new_details.append(detail)
        #Need to confirm type!
        # for detail in details:
        #     print type(detail), 'my type bae before id'
        #     new_full_trip_details.append(ast.literal_eval(detail))
        cur.execute("SELECT max(index) from full_trip_table;")
        new_index = cur.fetchone()[0] + 1
        cur.execute("INSERT INTO full_trip_table(index, username_id, full_trip_id,trip_location_ids, regular, county, state, details, n_days) VALUES (%s, %s, '%s', '%s', %s, '%s', '%s', '%s', %s);" %(new_index, username_id, new_full_trip_id, str(trip_location_ids).replace("'","''"), False, county, state, str(new_details).replace("'","''"), n_days))
        conn.commit()
    else:
        # Combination already cached: read it back from the full trip table.
        cur.execute("SELECT trip_location_ids, details FROM full_trip_table WHERE full_trip_id = '%s';"%(new_full_trip_id))
        trip_location_ids, new_details = cur.fetchone()
        trip_location_ids = ast.literal_eval(trip_location_ids)
        new_details = ast.literal_eval(new_details)
    conn.close()
    return new_full_trip_id, trip_location_ids, new_details
'''
TODO: update the database when the last item of a day trip is deleted;
fix any remaining bugs in the remove-event flow.
'''
def remove_event(full_trip_id, trip_locations_id, remove_event_id, username_id=1, remove_event_name=None, event_day=None, full_day = True):
    """Remove one event from a day trip and rebuild the enclosing full trip.

    Two distinct cases: when ``trip_locations_id == remove_event_id`` the whole
    day trip is being removed from the full trip; otherwise a single event is
    removed from the day trip's detail list and a new day-trip row is inserted.
    NOTE(review): the whole-day-removal paths return a 3-tuple while the final
    path returns a 4-tuple -- callers must distinguish, or this should be
    unified; confirm against call sites.
    """
    #may have some bugs if trip_locations_id != remove_event_id as last one:) test and need to fix
    print 'init:', full_trip_id, trip_locations_id, remove_event_id
    conn = psycopg2.connect(conn_str)
    cur = conn.cursor()
    if trip_locations_id == remove_event_id:
        # Removing an entire day trip from the full trip.
        if full_trip_id != trip_locations_id:
            # full_trip_id = full_trip_id[len(str(trip_locations_id))+1:]
            cur.execute("select trip_location_ids from full_trip_table where full_trip_id = '%s';" %(full_trip_id))
            # cur.execute("select trip_location_ids, details from full_trip_table where full_trip_id = '%s';" %(full_trip_id))
            trip_location_ids = cur.fetchone()[0]
            trip_location_ids = ast.literal_eval(trip_location_ids)
            trip_location_ids.remove(str(trip_locations_id))
            # Re-assemble the remaining days' details.
            full_trip_details = []
            for trip_id in trip_location_ids:
                cur.execute("select details from day_trip_table where trip_locations_id = '%s';" %(trip_id))
                details = cur.fetchone()[0]
                trip_details = ast.literal_eval(details)
                full_trip_details.extend(trip_details)
            conn.close()
            new_full_trip_id = '-'.join(trip_location_ids)
            for index, detail in enumerate(full_trip_details):
                full_trip_details[index] = ast.literal_eval(detail)
                # tidy address punctuation left over from concatenation
                full_trip_details[index]['address'] = full_trip_details[index]['address'].strip(', ').replace(', ,',',')
            print full_trip_details, new_full_trip_id, trip_location_ids
            return new_full_trip_id, full_trip_details, trip_location_ids
        # Trip and day ids coincide: nothing to rebuild.
        # NOTE(review): conn is not closed on this path -- confirm intent.
        return '','',''
    # Removing a single event from inside the day trip.
    print 'remove id: ', trip_locations_id
    cur.execute("select * from day_trip_table where trip_locations_id='%s'" %(trip_locations_id))
    (index, trip_locations_id, full_day, regular, county, state, detail, event_type, event_ids) = cur.fetchone()
    new_event_ids = convert_event_ids_to_lst(event_ids)
    remove_event_id = int(remove_event_id)
    new_event_ids.remove(remove_event_id)
    new_trip_locations_id = '-'.join(str(event_id) for event_id in new_event_ids)
    # if check_id:
    #     return new_trip_locations_id, check_id[-3]
    # details are stored with one wrapper char on each side; strip before parsing
    detail = ast.literal_eval(detail[1:-1])
    for index, trip_detail in enumerate(detail):
        # entries may be stored either as dicts or as their str() form
        if type(trip_detail) == str:
            if ast.literal_eval(trip_detail)['id'] == remove_event_id:
                remove_index = index
                break
        else:
            if trip_detail['id'] == remove_event_id:
                remove_index = index
                break
    new_detail = list(detail)
    new_detail.pop(remove_index)
    new_detail = str(new_detail).replace("'","''")
    regular = False
    # Insert the reduced day trip only if it is not already cached.
    cur.execute("select * from day_trip_table where trip_locations_id='%s'" %(new_trip_locations_id))
    check_id = cur.fetchone()
    if not check_id:
        cur.execute("select max(index) from day_trip_table;")
        new_index = cur.fetchone()[0]
        new_index+=1
        cur.execute("INSERT INTO day_trip_table VALUES (%i, '%s', %s, %s, '%s', '%s', '%s', '%s','%s');" \
        %(new_index, new_trip_locations_id, full_day, regular, county, state, new_detail, event_type, new_event_ids))
        conn.commit()
    conn.close()
    new_full_trip_id, new_full_trip_details,new_trip_location_ids = new_full_trip_afer_remove_event(full_trip_id, trip_locations_id, new_trip_locations_id, username_id=1)
    print 'delete trip details: ', new_full_trip_details
    return new_full_trip_id, new_full_trip_details,new_trip_location_ids, new_trip_locations_id
def new_full_trip_afer_remove_event(full_trip_id, old_trip_locations_id, new_trip_locations_id, username_id=1):
    """Rebuild and cache the full trip after a day trip's id changed.

    Substitutes ``new_trip_locations_id`` for ``old_trip_locations_id`` in the
    full trip's day list, regenerates the combined details, and inserts the
    new full-trip row if it does not already exist.  Returns
    (new_full_trip_id, new_full_trip_details, trip_location_ids).
    """
    conn = psycopg2.connect(conn_str)
    cur = conn.cursor()
    username_id = 1  # hard-coded: per-user trips not wired up yet
    cur.execute("SELECT trip_location_ids, regular, county, state, details, n_days FROM full_trip_table WHERE full_trip_id = '{}' LIMIT 1;".format(full_trip_id))
    trip_location_ids, regular, county, state, details, n_days = cur.fetchone()
    trip_location_ids = ast.literal_eval(trip_location_ids)
    trip_location_ids[:] = [new_trip_locations_id if x==old_trip_locations_id else x for x in trip_location_ids]
    new_full_trip_id = '-'.join(trip_location_ids)
    new_full_trip_details = []
    for trip_locations_id in trip_location_ids:
        cur.execute("SELECT details FROM day_trip_table WHERE trip_locations_id = '{}' LIMIT 1;".format(trip_locations_id))
        detail = cur.fetchone()[0]
        detail = ast.literal_eval(detail)
        # entries may be stored either as dicts or as their str() form
        detail[:] = [ast.literal_eval(x) if type(x) == str else x for x in detail]
        new_full_trip_details.extend(detail)
    regular=False
    if not check_full_trip_id(new_full_trip_id):
        cur.execute("SELECT max(index) FROM full_trip_table;")
        full_trip_index = cur.fetchone()[0] + 1
        cur.execute("INSERT INTO full_trip_table(index, username_id, full_trip_id,trip_location_ids, regular, county, state, details, n_days) VALUES (%s, %s, '%s', '%s', %s, '%s', '%s', '%s', %s);" %(full_trip_index, username_id, new_full_trip_id, str(trip_location_ids).replace("'","''"), regular, county, state, str(new_full_trip_details).replace("'","''"), n_days))
        conn.commit()
    conn.close()
    return new_full_trip_id, new_full_trip_details,trip_location_ids
def event_type_time_spent(adjusted_normal_time_spent):
    """Classify a POI by its adjusted visit length (minutes).

    Returns 'big' (> 180), 'med' (120-180 inclusive), or 'small' (< 120).

    Fix: the original first branch had a stray trailing comma
    (``return 'big',``), which returned the tuple ``('big',)`` instead of the
    string, so ``event_type == 'big'`` comparisons downstream never matched.
    """
    if adjusted_normal_time_spent > 180:
        return 'big'
    elif adjusted_normal_time_spent >= 120:
        return 'med'
    else:
        return 'small'
#Model: get cloest events within a radius: min 3 same type of events (poi_type), 3 within the radius 10 mile, 3 same time spent
def suggest_event_array(full_trip_id, trip_location_id, switch_event_id, username_id,max_radius = 10*1609.34):
    """Suggest up to 7 replacement POIs for ``switch_event_id``.

    Candidates are ranked by review count and gathered in relaxation passes:
    same poi_type + same time-spent bucket within 10 miles, then same
    poi_type, then same time bucket, then nearest same-type POIs.
    NOTE(review): ``max_radius`` is never used -- the radius is hard-coded as
    ``10 * 1609.34`` metres in every query; confirm whether it should be
    substituted in.
    """
    conn = psycopg2.connect(conn_str)
    cur = conn.cursor()
    cur.execute("SELECT event_ids FROM day_trip_table WHERE trip_locations_id = '%s' LIMIT 1;" %(trip_location_id))
    old_event_ids = convert_event_ids_to_lst(cur.fetchone()[0])
    old_event_ids = map(int, old_event_ids)
    cur.execute("SELECT index, name, coord_lat, coord_long,poi_type, adjusted_visit_length,num_reviews FROM poi_detail_table where index=%s;" %(switch_event_id))
    index, name, coord_lat, coord_long,poi_type, adjusted_normal_time_spent,num_reviews = cur.fetchone()
    event_type = event_type_time_spent(adjusted_normal_time_spent)
    # Pass 1: same poi_type AND same time-spent bucket within 10 miles.
    if event_type == 'big':
        cur.execute("SELECT index, name, address, coord_lat, coord_long, city, state, icon_url, check_full_address, poi_type, adjusted_visit_length, img_url FROM poi_detail_table WHERE ST_Distance_Sphere(geom, ST_MakePoint({1},{2})) <= 10 * 1609.34 and adjusted_visit_length>180 and poi_type='{0}' and index NOT IN {3} ORDER BY num_reviews LIMIT 7;".format(poi_type, coord_long,coord_lat,tuple(old_event_ids)))
    elif event_type == 'med':
        cur.execute("SELECT index, name, address, coord_lat, coord_long, city, state, icon_url, check_full_address, poi_type, adjusted_visit_length, img_url FROM poi_detail_table WHERE ST_Distance_Sphere(geom, ST_MakePoint({1},{2})) <= 10 * 1609.34 and adjusted_visit_length>=120 and adjusted_visit_length<=180 and poi_type='{0}' and index NOT IN {3} ORDER BY num_reviews LIMIT 7;".format(poi_type, coord_long,coord_lat,tuple(old_event_ids)))
    else:
        cur.execute("SELECT index, name, address, coord_lat, coord_long, city, state, icon_url, check_full_address, poi_type, adjusted_visit_length, img_url FROM poi_detail_table WHERE ST_Distance_Sphere(geom, ST_MakePoint({1},{2})) <= 10 * 1609.34 and adjusted_visit_length<120 and poi_type = '{0}' and index NOT IN {3} ORDER BY num_reviews LIMIT 7;".format(poi_type, coord_long,coord_lat,tuple(old_event_ids)))
    suggest_event_lst = cur.fetchall()
    # Exclude already-suggested ids from subsequent passes.
    rank_one_idx = [x[0] for x in suggest_event_lst]
    old_event_ids.extend(rank_one_idx)
    old_event_ids = map(int, old_event_ids)
    limit_len = min(7- len(suggest_event_lst), 3)
    # Pass 2: relax the time bucket, keep poi_type (at most 3 more).
    if limit_len:
        cur.execute("SELECT index, name, address, coord_lat, coord_long, city, state, icon_url, check_full_address, poi_type, adjusted_visit_length, img_url FROM poi_detail_table WHERE ST_Distance_Sphere(geom, ST_MakePoint({1},{2})) <= 10 * 1609.34 and poi_type='{0}' and index not in {3} ORDER BY num_reviews LIMIT {4};".format(poi_type, coord_long,coord_lat, tuple(old_event_ids), limit_len))
        add_suggest_lst = cur.fetchall()
        if add_suggest_lst:
            suggest_event_lst.extend(add_suggest_lst)
            limit_len = min(7- len(suggest_event_lst), 3)
            rank_one_idx = [x[0] for x in suggest_event_lst]
            old_event_ids.extend(rank_one_idx)
            old_event_ids = map(int, old_event_ids)
    # Pass 3: relax poi_type, keep the time bucket.
    if limit_len:
        if event_type == 'big':
            cur.execute("SELECT index, name, address, coord_lat, coord_long, city, state, icon_url, check_full_address, poi_type, adjusted_visit_length, img_url FROM poi_detail_table WHERE ST_Distance_Sphere(geom, ST_MakePoint({1},{2})) <= 10 * 1609.34 and adjusted_visit_length>180 and index NOT IN {3} ORDER BY num_reviews LIMIT {0};".format(limit_len, coord_long,coord_lat,tuple(old_event_ids)))
        elif event_type == 'med':
            cur.execute("SELECT index, name, address, coord_lat, coord_long, city, state, icon_url, check_full_address, poi_type, adjusted_visit_length, img_url FROM poi_detail_table WHERE ST_Distance_Sphere(geom, ST_MakePoint({1},{2})) <= 10 * 1609.34 and adjusted_visit_length>=120 and adjusted_visit_length<=180 and index NOT IN {3} ORDER BY num_reviews LIMIT {0};".format(limit_len, coord_long,coord_lat,tuple(old_event_ids)))
        else:
            cur.execute("SELECT index, name, address, coord_lat, coord_long, city, state, icon_url, check_full_address, poi_type, adjusted_visit_length, img_url FROM poi_detail_table WHERE ST_Distance_Sphere(geom, ST_MakePoint({1},{2})) <= 10 * 1609.34 and adjusted_visit_length<120 and index NOT IN {3} ORDER BY num_reviews LIMIT {0};".format(limit_len, coord_long,coord_lat,tuple(old_event_ids)))
        add_suggest_lst = cur.fetchall()
        if add_suggest_lst:
            suggest_event_lst.extend(add_suggest_lst)
    # Pass 4: nearest same-type POIs regardless of radius.
    if 7- len(suggest_event_lst):
        rank_one_idx = [x[0] for x in suggest_event_lst]
        old_event_ids.extend(rank_one_idx)
        old_event_ids = map(int, old_event_ids)
        # NOTE(review): the result of this query is never fetched, so pass 4
        # never actually contributes suggestions -- confirm whether a
        # fetchall/extend is missing here.
        cur.execute("SELECT index, name, address, coord_lat, coord_long, city, state, icon_url, check_full_address, poi_type, adjusted_visit_length, img_url FROM poi_detail_table WHERE poi_type='{0}' and index not in {3} ORDER BY ST_Distance_Sphere(geom, ST_MakePoint({1},{2})) LIMIT {4};".format(poi_type, coord_long,coord_lat, tuple(old_event_ids), 7- len(suggest_event_lst)))
    # Convert the raw rows into the detail dicts the front end expects.
    suggest_dict_list = []
    for i, a in enumerate(suggest_event_lst):
        suggest_dict_list.append( {'id': a[0],'name': a[1],'address': a[2], 'coord_lat': a[3], 'coord_long': a[4], 'city': a[5], 'state': a[6], 'icon_url': a[7], 'check_full_address': a[8], 'poi_type': a[9], 'adjusted_visit_length': a[10], 'img_url': a[11]})
    conn.close()
    return suggest_dict_list
def convert_db_details(detail, remove_event_id):
    """Parse a day-trip ``details`` DB string and drop one event from it.

    ``detail`` is the raw text stored in day_trip_table.details: the first
    and last characters are wrapper characters, and the middle is a Python
    literal of event detail dicts (entries may themselves be stringified
    dicts).  Returns the remaining details re-serialized with single quotes
    doubled, ready to embed in a SQL string literal; if ``remove_event_id``
    is not found the serialized original list is returned unchanged.

    Fixes: the original ended with a bare ``return`` (the computed result was
    discarded) and raised NameError when the id was absent.
    """
    detail = ast.literal_eval(detail[1:-1])
    remove_index = None
    for index, trip_detail in enumerate(detail):
        # entries may be stored either as dicts or as their str() form
        if type(trip_detail) == str:
            trip_detail = ast.literal_eval(trip_detail)
        if trip_detail['id'] == remove_event_id:
            remove_index = index
            break
    new_detail = list(detail)
    if remove_index is not None:
        new_detail.pop(remove_index)
    # Double single quotes for embedding in a single-quoted SQL literal.
    return str(new_detail).replace("'", "''")
#Get full list of event_ids from new full trip table!
def switch_suggest_event(full_trip_id, update_trip_location_id, update_suggest_event, username_id=1):
    """Apply suggested-event replacements across a full trip and persist it.

    ``update_suggest_event`` is a stringified dict mapping old event id (as a
    string) to its replacement detail dict.  Every day trip in the full trip
    is re-written with the replacements; changed day trips and the changed
    full trip are cached when not already present.  Returns
    (full_trip_id, full_trip_details, trip_location_ids, update_trip_location_id)
    with the post-replacement values.
    """
    conn = psycopg2.connect(conn_str)
    cur = conn.cursor()
    cur.execute("SELECT trip_location_ids FROM full_trip_table WHERE full_trip_id = '%s';" %(full_trip_id))
    # cur.execute("select trip_location_ids, details from full_trip_table where full_trip_id = '%s';" %(full_trip_id))
    trip_location_ids = ast.literal_eval(cur.fetchone()[0])
    update_suggest_event = ast.literal_eval(update_suggest_event)
    full_trip_details = []
    full_trip_trip_locations_id = []
    new_update_trip_location_id = ''
    for trip_location_id in trip_location_ids:
        cur.execute("SELECT * FROM day_trip_table WHERE trip_locations_id = '%s' LIMIT 1;" %(trip_location_id))
        (index, trip_locations_id, full_day, regular, county, state, detail, event_type, event_ids) = cur.fetchone()
        event_ids = convert_event_ids_to_lst(event_ids)
        # details are stored with one wrapper char on each side; strip before parsing
        detail = list(ast.literal_eval(detail[1:-1]))
        #make sure detail type is dict!
        for i,v in enumerate(detail):
            if type(v) != dict:
                detail[i] = ast.literal_eval(v)
        full_day = True
        event_type = 'suggest'
        # Swap in each replacement, keeping the original event's day number.
        for idx, event_id in enumerate(event_ids):
            if str(event_id) in update_suggest_event:
                regular = False
                replace_event_detail = update_suggest_event[str(event_id)]
                replace_event_detail['day'] = detail[idx]['day']
                detail[idx] = replace_event_detail
                event_ids[idx] = replace_event_detail['id']
        if not regular:
            # This day trip changed: derive its new id and cache the row.
            trip_locations_id = '-'.join(map(str,event_ids))
            if not check_day_trip_id(trip_locations_id):
                cur.execute("SELECT max(index) FROM day_trip_table;")
                new_index = cur.fetchone()[0] + 1
                cur.execute("INSERT INTO day_trip_table VALUES (%i, '%s',%s,%s,'%s','%s','%s','%s','%s');" %(new_index, trip_locations_id, full_day, regular, county, state, str(detail).replace("'",'"'),event_type,event_ids))
                conn.commit()
        if update_trip_location_id == trip_location_id:
            new_update_trip_location_id = trip_locations_id
        full_trip_details.extend(detail)
        full_trip_trip_locations_id.append(trip_locations_id)
    print 'return:',full_trip_id, full_trip_details, full_trip_trip_locations_id, new_update_trip_location_id
    if full_trip_trip_locations_id != trip_location_ids:
        # At least one day trip changed: cache the new full trip too.
        new_full_trip_id = '-'.join(full_trip_trip_locations_id)
        if not check_full_trip_id(new_full_trip_id):
            n_days = len(trip_location_ids)
            regular =False
            cur.execute("SELECT max(index) FROM full_trip_table;")
            new_index = cur.fetchone()[0] + 1
            cur.execute("INSERT INTO full_trip_table VALUES (%s, %s, '%s', '%s', %s, '%s', '%s', '%s', %s);" %(new_index, username_id, new_full_trip_id, str(full_trip_trip_locations_id).replace("'","''"), regular, county, state, str(full_trip_details).replace("'","''"), n_days))
            conn.commit()
        conn.close()
        return new_full_trip_id, full_trip_details, full_trip_trip_locations_id, new_update_trip_location_id
    # NOTE(review): conn is not closed on this unchanged-trip path -- confirm intent.
    if new_update_trip_location_id == '':
        new_update_trip_location_id = update_trip_location_id
    return full_trip_id, full_trip_details, full_trip_trip_locations_id, new_update_trip_location_id
#using v1 front end design.
def create_full_trip(full_trip_id, username_id):
    """Clone an existing full-trip row under ``username_id``.

    Returns False when the user already owns this ``full_trip_id``;
    otherwise copies the existing row with a fresh index and the new
    username and returns True.
    """
    conn = psycopg2.connect(conn_str)
    cur = conn.cursor()
    # print "select * from full_trip_table where full_trip_id='%s' and username_id=%s;" %(full_trip_id, username_id)
    cur.execute("select count(1) from full_trip_table where full_trip_id='%s' and username_id=%s;" %(full_trip_id, username_id))
    cnt = cur.fetchone()[0]
    if cnt != 0:
        # fix: the original returned here without closing the connection
        conn.close()
        return False
    else:
        cur.execute("SELECT max(index) from full_trip_table;")
        new_index = cur.fetchone()[0] + 1
        cur.execute("select * from full_trip_table where full_trip_id='%s';" %(full_trip_id))
        # print "select * from full_trip_table where full_trip_id='%s';" %(full_trip_id)
        (index, old_username_id, full_trip_id,trip_location_ids, regular, county, state, details, n_days) = cur.fetchone()
        cur.execute("INSERT INTO full_trip_table VALUES (%s, %s, '%s', '%s', %s, '%s', '%s', '%s', %s);" %(new_index, username_id, full_trip_id, trip_location_ids.replace("'",'"'), regular, county, state, details.replace("'",'"'), n_days))
        conn.commit()
        conn.close()
        return True
<file_sep>import React, { PropTypes } from 'react';
import SelectField from 'material-ui/SelectField';
import MenuItem from 'material-ui/MenuItem';
// encodeURIComponent(myUrl)
const MenuItemDays = ({daysValue, handleDaysOnChange}) => {
let boundType = handleDaysOnChange.bind(this);
return (
<div>
<SelectField
floatingLabelText="Days"
value={daysValue}
onChange={boundType}
>
<MenuItem value={'1'} primaryText="1" />
<MenuItem value={'2'} primaryText="2" />
<MenuItem value={'3'} primaryText="3" />
<MenuItem value={'4'} primaryText="4" />
<MenuItem value={'5'} primaryText="5" />
</SelectField>
</div>
)
}
MenuItemDays.propTypes = {
daysValue: PropTypes.string.isRequired,
handleDaysOnChange: PropTypes.func.isRequired,
};
export default MenuItemDays;<file_sep>from django.conf.urls import url, include
from angeltrip import views
from rest_framework.routers import DefaultRouter
from django.conf import settings
from django.conf.urls.static import static
# from . import views as local_views
from rest_framework.authtoken import views as rest_framework_views
# Create a router and register our viewsets with it.
router = DefaultRouter()
router.register(r'users', views.UserViewSet)
# router.register(r'accounts', views.UserView, 'list')
# The API URLs are now determined automatically by the router.
# Additionally, we include the login URLs for the browsable API.
urlpatterns = [
    url(r'^', include(router.urls)),
    # Trip detail endpoints keyed by id (any chars except '.').
    # NOTE(review): these two patterns lack the '^' anchor the other routes
    # use, so they can match anywhere in the path -- confirm intent.
    url(r'full_trip/(?P<full_trip_id>[^\.]+)/$',
        views.FullTripDetail.as_view(),
        name='full_trip_detail'),
    url(r'outside_trip/(?P<outside_trip_id>[^\.]+)/$',
        views.OutsideTripDetail.as_view(),
        name='outside_trip_detail'),
    # Search endpoints.
    url(r'^full_trip_search/$', views.FullTripSearch.as_view(), name='full-trip-detail'),
    url(r'^outside_trip_search/$', views.OutsideTripSearch.as_view(), name='outside-trip-detail'),
    url(r'^city_state_search/$', views.CityStateSearch.as_view(), name='city-state-detail'),
    # Trip mutation endpoints (delete / suggest / add events).
    url(r'^update_trip/delete/$', views.FullTripDeleteEvent.as_view(), name='full-trip-delete'),
    url(r'^update_trip/suggest_search/$', views.FullTripSuggestArray.as_view(), name='full-trip-suggest-search'),
    url(r'^update_trip/suggest_confirm/$', views.FullTripSuggestConfirm.as_view(), name='full-trip-suggest-confirm'),
    url(r'^update_trip/add_search/$', views.FullTripAddSearch.as_view(), name='full-trip-add-search'),
    url(r'^update_trip/add/$', views.FullTripAddEvent.as_view(), name='full-trip-add-event'),
    url(r'^create_full_trip/$', views.FullTripCreate.as_view(), name='full-trip-create'),
    url(r'^OutsideTripAddSearch/$', views.OutsideTripAddSearch.as_view(), name='OutsideTripAddSearch'),
    url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
    # url(r'^login/$', local_views.get_auth_token, name='login'),
    # url(r'^logout/$', local_views.logout_user, name='logout'),
    # url(r'^auth/$', local_views.login_form, name='login_form'),
    # Account / auth endpoints.
    # NOTE(review): 'register' has no trailing '/$' unlike its siblings -- confirm intent.
    url(r'^account/get_auth_token/$', rest_framework_views.obtain_auth_token, name='get_auth_token'),
    url(r'^account/register', views.create_auth, name='register_user'),
    url(r'^iplocation/$', views.IPGeoLocation.as_view()),
    # url(r'^api/', include(router.urls)),
    # url(r'^users/create_user', views.CreateUserView),
]<file_sep>import React from 'react';
import { Card, CardTitle } from 'material-ui/Card';
import SearchBox from './SearchBox';
const HomePage = () => (
<Card className="container">
<div className="col-xs-6">
<SearchBox />
</div>
<CardTitle title="React Application" subtitle="This is the home page." />
</Card>
);
export default HomePage;
<file_sep>import axios from 'axios';
import React, { Component } from 'react';
import moment from 'moment';
import BlogConstants from '../constants/BlogConstants.jsx';
import TripConstants from '../constants/TripConstants.jsx';
import PostFullTripList from '../components/PostFullTripList.jsx';
import PostOutsideTripList from '../components/PostOutsideTripList.jsx';
// Blog-post page: loads a post by slug, then fetches the trip (full or
// outside) the post references, and renders post body plus trip list.
export default class Post extends Component {
  constructor(props, context) {
    super(props, context);
    this.state = {
      postData: {},              // raw post object from the blog API
      fullTripData: null,        // trip payload when the post has a full_trip_id
      outsideTripData: null,     // trip payload when the post has an outside_trip_id
      updateTripLocationId: '',  // currently selected day-trip tab
      baseBackgroundImg: '',     // hero image URL derived from the first trip event
    }
    this.fetchPost = this.fetchPost.bind(this);
    this.fetchTripDetails = this.fetchTripDetails.bind(this);
    this.postBase = this.postBase.bind(this);
    this.getTapName = this.getTapName.bind(this);
  }
  // Load the post by slug, then chain into the trip-detail fetch.
  fetchPost(slug) {
    console.log('post url: ', BlogConstants.POST_URL + slug)
    axios.get(BlogConstants.POST_URL + slug)
      .then(response => {
        this.setState({
          postData: response.data
        });
        this.fetchTripDetails();
      });
  }
  // Fetch whichever trip type the post references; each fetch updates the
  // background image once the data arrives.
  fetchTripDetails() {
    if (this.state.postData.full_trip_id !== null){
      axios.get(TripConstants.FULL_TRIP_URL + this.state.postData.full_trip_id)
        .then(response => {
          this.setState({
            fullTripData: response.data
          });
          this.postBase();
        });
    }
    if (this.state.postData.outside_trip_id !== null){
      axios.get(TripConstants.OUTSIDE_TRIP_URL + this.state.postData.outside_trip_id)
        .then(response => {
          this.setState({
            outsideTripData: response.data
          });
          this.postBase();
        });
    }
  }
  // Callback passed to trip lists: records which day-trip tab is active.
  getTapName(updateTripLocationId) {
    this.setState({
      updateTripLocationId: updateTripLocationId,
    });
  }
  // Pick the hero background image from the first event of whichever trip
  // type loaded; full trip takes precedence.
  postBase() {
    if (this.state.fullTripData !== null) {
      console.log('fullTripData img_url: ', this.state.fullTripData)
      this.setState({
        // baseBackgroundImg: this.state.postData.full_trip_details[0].img_url
        baseBackgroundImg: this.state.fullTripData.full_trip_details[0].img_url
      });
    } else if (this.state.outsideTripData !== null) {
      console.log('outsideTripData img_url: ', this.state.outsideTripData.outside_trip_details[0][0].img_url)
      this.setState({
        baseBackgroundImg: this.state.outsideTripData.outside_trip_details[0][0].img_url
      });
    }
  }
  // NOTE(review): componentWillMount is deprecated in newer React releases;
  // consider componentDidMount when upgrading.
  componentWillMount() {
    this.fetchPost(this.props.params.slug);
  }
  render() {
    const imgUrl = this.state.baseBackgroundImg;
    const postTitle = this.state.postData.title;
    const postBody = this.state.postData.body;
    const postAurthor = this.state.postData.username;
    // Fall back to a date 30 days ago while the post is still loading.
    const postDate = this.state.postData.pub_date ? moment(this.state.postData.pub_date).calendar() : moment().subtract(30, 'days').calendar();
    const avatarUrl = 'https://s3.amazonaws.com/travel-with-friends/profile.jpg';
    const avatarStyle = {
      backgroundImage:`url(${avatarUrl})`,
      backgroundColor: '#263238',
      backgroundSize: 'cover',
    }
    const bgImg = {
      backgroundSize: 'cover',
      backgroundImage: `url(${imgUrl})`,
      backgroundColor: '#263238',
    };
    // Same image, washed out behind the whole page body.
    const postBgImg = {
      backgroundSize: 'cover',
      backgroundImage: `linear-gradient(rgba(255,255,255,.7), rgba(255,255,255,.7)), url(${imgUrl})`,
      backgroundColor: '#263238',
    };
    return (
      <div className="demo-blog demo-blog--blogpost mdl-layout mdl-js-layout has-drawer is-upgraded" style={postBgImg}>
        <main className="mdl-layout__content">
          <div className="demo-back">
            <a className="mdl-button mdl-js-button mdl-js-ripple-effect mdl-button--icon" href={'/posts/'} title="go back" role="button">
              <i className="material-icons" role="presentation">arrow_back</i>
            </a>
          </div>
          <div className="demo-blog__posts mdl-grid">
            <div className="mdl-card mdl-shadow--4dp mdl-cell mdl-cell--12-col">
              <div className="mdl-card__media mdl-color-text--grey-50" style={bgImg}>
                <h3>{postTitle}</h3>
              </div>
              <div className="mdl-color-text--grey-700 mdl-card__supporting-text meta">
                <div className="minilogo" style={avatarStyle}></div>
                <div>
                  <strong>{postAurthor}</strong>
                  <span>{postDate}</span>
                </div>
                <div className="section-spacer"></div>
              </div>
              <div className="mdl-color-text--grey-700 mdl-card__supporting-text">
                {postBody}
                {/* NOTE(review): debug console.log left in render output */}
                {console.log('outside trip data: ', this.state.outsideTripData)}
                {this.state.fullTripData !== null &&
                  <div className="col-md-8">
                    <PostFullTripList
                      fullTripDetails={this.state.fullTripData.full_trip_details}
                      tripLocationIds={this.state.fullTripData.trip_location_ids}
                      getTapName={this.getTapName}
                    />
                  </div> }
                {this.state.outsideTripData !== null &&
                  <PostOutsideTripList
                    outsideRouteDetails={this.state.outsideTripData.outside_trip_details[0]}
                    getTapName={this.getTapName}
                    outsideRouteTitle={postTitle}
                  />}
              </div>
            </div>
          </div>
        </main>
        <div className="mdl-layout__obfuscator"></div>
      </div>
    )
  }
}
<file_sep>import React, {PropTypes} from 'react';
import FloatingActionButton from 'material-ui/FloatingActionButton';
import ContentRedo from 'material-ui/svg-icons/content/redo';
const FullTripResetButton = ({onFullTripReset}) => (
<div>
<FloatingActionButton mini={true} onClick={onFullTripReset}>
<ContentRedo />
</FloatingActionButton>
</div>
);
FullTripResetButton.propTypes = {
onFullTripReset: PropTypes.func.isRequired,
};
export default FullTripResetButton;
<file_sep>import React, {PropTypes} from 'react';
import MapsMap from 'material-ui/svg-icons/maps/map';
import RaisedButton from 'material-ui/RaisedButton';
const GoogleMapUrlButton = ({googleMapUrl}) => (
<div>
<RaisedButton
href={googleMapUrl}
labelPosition="before"
target="_blank"
label="Open Google Map"
primary={true}
icon={<MapsMap className="muidocs-icon-google-map" />}
/>
</div>
);
GoogleMapUrlButton.propTypes = {
googleMapUrl: PropTypes.string.isRequired,
};
export default GoogleMapUrlButton;
<file_sep>import React, { PropTypes } from 'react';
import PostNewForm from '../components/LoginForm.jsx';
import Auth from '../services/AuthService';
// import SimpleMDE from 'react-simplemde-editor';
import { FormGroup, FormControl, ControlLabel, Button } from 'react-bootstrap';
import { IndexLinkContainer, LinkContainer } from 'react-router-bootstrap';
import PostMDE from '../components/PostMDE';
// New-post page. Currently renders only the markdown editor (PostMDE);
// the login/auth handlers below are wired up in the constructor but are
// not referenced by render().
class PostNewPage extends React.Component {
  /**
   * Class constructor.
   */
  constructor(props, context) {
    super(props, context);
    // One-shot success message persisted across the login redirect.
    const storedMessage = localStorage.getItem('successMessage');
    let successMessage = '';
    if (storedMessage) {
      successMessage = storedMessage;
      localStorage.removeItem('successMessage');
    }
    // set the initial component state
    this.state = {
      errors: {},
      successMessage,
      textValue: "",
      user: {
        email: '',
        password: ''
      }
    };
    this.onTextChange = this.onTextChange.bind(this)
    this.onSubmit = this.onSubmit.bind(this)
    this.processForm = this.processForm.bind(this);
  }
  // Keep the editor text in component state.
  onTextChange(value) {
    this.setState({
      textValue: value
    });
  };
  // NOTE(review): dead code -- `body` is computed but never used, and
  // nothing is submitted here.
  onSubmit() {
    const body = this.state.textValue;
  }
  /**
   * Process the form.
   *
   * @param {object} event - the JavaScript event object
   */
  processForm(event) {
    // prevent default action. in this case, action is the form submission event
    event.preventDefault();
    const _this = this;
    // create a string for an HTTP body message
    const email = this.state.user.email;
    const password = encodeURIComponent(this.state.user.password);
    Auth.login(email, password)
      .catch(function(err) {
        alert("Error logging in", err)
      })
      .done(function(greeting) {
        // On successful login, redirect to the home route.
        if (greeting !== undefined) {
          console.log('greeting', greeting);
          _this.context.router.replace('/');
        }
      });
  }
  /**
   * Render the component.
   */
  render() {
    // const { fields: {title, body, tags}, handleSubmit } = this.props;
    return (
      <PostMDE />
    );
  }
}
PostNewPage.contextTypes = {
  router: PropTypes.object.isRequired
};
export default PostNewPage;<file_sep>import React, {Component} from 'react';
import { withGoogleMap, GoogleMap, Marker,DirectionsRenderer } from "react-google-maps";
import Helmet from "react-helmet";
export default class DirectionsTrip extends Component {
constructor(props) {
super(props);
this.state = {
directions: null,
directionDetails: {},
directionDetailsTwo: {},
directionsTwo: null,
};
this.getWaypts = this.getWaypts.bind(this)
this.getWayptsTwo = this.getWayptsTwo.bind(this)
this.getDirections = this.getDirections.bind(this)
}
componentWillMount() {
this.setState({
directionDetails: this.getWaypts(this.props.fullTripDetails, this.props.tripLocationIds, this.props.updateTripLocationId),
directionDetailsTwo: this.getWayptsTwo(this.props.fullTripDetails),
});
}
componentWillReceiveProps(nextProps) {
if ((nextProps.fullTripDetails !== this.props.fullTripDetails)) {
this.setState({
directionDetails: this.getWaypts(nextProps.fullTripDetails, nextProps.tripLocationIds, nextProps.updateTripLocationId),
directionDetailsTwo: this.getWayptsTwo(nextProps.fullTripDetails),
});
console.log('updateing directions: ', this.state.directionDetails, this.state.directionDetailsTwo)
}
}
componentDidMount() {
this.getDirections();
}
componentDidUpdate(prevProps, prevState) {
if ((prevProps.fullTripDetails !== this.props.fullTripDetails)) {
console.log('map updated!')
this.getDirections();
}
}
shouldComponentUpdate(nextProps,nextState) {
const differentFullTripDetails = nextProps.fullTripDetails !== this.props.fullTripDetails;
const differentTripLocationId = nextProps.updateTripLocationId !== this.props.updateTripLocationId;
const differentDirectionDetails = nextState.directions !== this.state.directions;
return differentFullTripDetails || differentTripLocationId || differentDirectionDetails;
}
getWaypts = function(fullTripDetails, tripLocationIds, updateTripLocationId) {
let waypts = [];
const currentDay = 0;
const oriIndex = fullTripDetails.findIndex(x => x.day == currentDay);
const dayAry = fullTripDetails.map(function(a) {return a.day;});
const destIndex = dayAry.lastIndexOf(currentDay);
let origin = '';
let location = '';
let destination = '';
for (let i = oriIndex; i <= destIndex; i++){
let addressArr = fullTripDetails[i].address.split(', ');
let newArr = [];
for (let j = 0; j<addressArr.length-1; j++) {
if(isNaN(addressArr[j])) {
newArr.push(addressArr[j]);
}
}
let newAddress = newArr.join(', ');
let cityState = fullTripDetails[i].city + ', '+fullTripDetails[i].state;
if(newAddress === cityState){
location = fullTripDetails[i].name + ', ' + cityState;
// console.log('no coord: ', location)
}
else {
location = new window.google.maps.LatLng(fullTripDetails[i].coord_lat, fullTripDetails[i].coord_long);
}
if(i == oriIndex) {
origin = location;
// console.log(fullTripDetails[i], 'ori')
}
else if(i ==destIndex) {
destination = location;
// console.log(fullTripDetails[i],'dest')
}
else {
waypts.push({location: location, stopover: true});
}
}
return {
origin: origin,
destination: destination,
waypts: waypts
};
}
getWayptsTwo = function(fullTripDetails) {
let waypts = [];
const currentDay = 1;
const oriIndex = fullTripDetails.findIndex(x => x.day == currentDay);
const dayAry = fullTripDetails.map(function(a) {return a.day;});
const destIndex = dayAry.lastIndexOf(currentDay);
let origin = '';
let location = '';
let destination = '';
for (let i = oriIndex; i <= destIndex; i++){
let addressArr = fullTripDetails[i].address.split(', ');
let newArr = [];
for (let j = 0; j<addressArr.length-1; j++) {
if(isNaN(addressArr[j])) {
newArr.push(addressArr[j]);
}
}
let newAddress = newArr.join(', ');
let cityState = fullTripDetails[i].city + ', '+fullTripDetails[i].state;
if(newAddress === cityState){
location = fullTripDetails[i].name + ', ' + cityState;
// console.log('no coord: ', location)
}
else {
location = new window.google.maps.LatLng(fullTripDetails[i].coord_lat, fullTripDetails[i].coord_long);
}
if(i == oriIndex) {
origin = location;
// console.log(fullTripDetails[i], 'ori')
}
else if(i ==destIndex) {
destination = location;
// console.log(fullTripDetails[i],'dest')
}
else {
waypts.push({location: location, stopover: true});
}
}
return {
origin: origin,
destination: destination,
waypts: waypts
};
}
getDirections() {
// console.log('get directions')
const DirectionsService = new window.google.maps.DirectionsService();
const DirectionsServiceTwo = new window.google.maps.DirectionsService();
if(this.state.directionDetails.origin){
DirectionsService.route({
origin: this.state.directionDetails.origin,
destination: this.state.directionDetails.destination,
travelMode: window.google.maps.TravelMode.DRIVING,
waypoints: this.state.directionDetails.waypts,
optimizeWaypoints: true,
}, (result, status) => {
if (status === window.google.maps.DirectionsStatus.OK) {
this.setState({
directionsTwo: result,
});
console.log('reuslt 1: ', result)
} else {
console.error(`error fetching directions ${result}`);
}
});
DirectionsServiceTwo.route({
origin: this.state.directionDetailsTwo.origin,
destination: this.state.directionDetailsTwo.destination,
travelMode: window.google.maps.TravelMode.DRIVING,
waypoints: this.state.directionDetailsTwo.waypts,
optimizeWaypoints: true,
}, (result, status) => {
if (status === window.google.maps.DirectionsStatus.OK) {
this.setState({
directions: result,
});
console.log('reuslt 2: ', result)
} else {
console.error(`error fetching directions ${result}`);
}
});
console.log('loop thru directions two!')
}
}
render() {
console.log('map re-renderred')
const DirectionsGoogleMap = withGoogleMap(props => (
<GoogleMap
defaultZoom={7}
defaultCenter={this.state.center}
>
{this.state.directions && <DirectionsRenderer directions={this.state.directions} />}
{this.state.directions && <DirectionsRenderer directions={this.state.directionsTwo} />}
</GoogleMap>
));
// this.getDirections();
return (
<DirectionsGoogleMap
containerElement={
<div style={{ height: `100%` }} />
}
mapElement={
<div style={{ height: `100%` }} />
}
center={this.state.directionDetails.origin}
/>
);
}
}<file_sep>import AppDispatcher from '../dispatchers/AppDispatcher.jsx';
import UserConstants from '../constants/UserConstants.jsx';
import createBrowserHistory from '../services/History.jsx';
// import createHistory from 'history/createBrowserHistory'
// Flux action creators for authentication and user-detail loading.
export default {
  // Dispatch LOGIN_USER to the stores, then persist the token so later
  // requests (and page reloads) can reuse it.
  loginUser: (token) => {
    //var savedToken = localStorage.getItem('user_token');
    AppDispatcher.dispatch({
      actionType: UserConstants.LOGIN_USER,
      token: token
    });
    // Persisted after the dispatch so stores see the action first.
    localStorage.setItem('user_token', token);
    // this won't work with onEnter hook in Routes
    // const history = createHistory()
    // // Get the current location.
    // const location = history.location
    // // Listen for changes to the current location.
    // const unlisten = history.listen((location, action) => {
    //   // location is an object like window.location
    //   console.log(action, location.pathname, location.state)
    // })
    // // Use push, replace, and go to navigate around.
    // history.push('/dashboard')
    // unlisten();
    // console.log(localStorage);
  },
  // Navigate back to the root, drop the stored token and notify the stores.
  logoutUser: () => {
    // NOTE(review): history v4 objects expose replace(path), not
    // replaceState() — confirm the History service really provides this.
    createBrowserHistory.replaceState(null, '/');
    localStorage.removeItem('user_token');
    AppDispatcher.dispatch({
      actionType: UserConstants.LOGOUT_USER
    })
  },
  // Forward a fetched user-detail payload to interested stores.
  loadUserDetail: (data) => {
    console.log('data in action: ', data, typeof data)
    AppDispatcher.dispatch({
      actionType: UserConstants.LOAD_USER_DETAIL,
      user: data
    });
  }
}<file_sep>var React = require('react');
var ReactDOM = require('react-dom');
var SimpleMDEReact = require('react-simplemde-editor');
var Editor = require('./Editor');
let counter = 1;
// Legacy React.createClass wrapper around the custom markdown Editor.
// NOTE(review): extraKeys, handleChange2, textValue2 and handleTextChange are
// defined but never referenced by render below — presumably leftovers from
// the react-simplemde-editor demo; confirm before removing.
module.exports = React.createClass({
  getInitialState() {
    return {
      textValue1: "I am the initial value. Erase me, or try the button above.",
      textValue2: "Focus this text area and then use the Up and Down arrow keys to see the `extraKeys` prop in action"
    }
  },
  // Extra CodeMirror key bindings (currently unused by render).
  extraKeys() {
    return {
      Up: function(cm) {
        cm.replaceSelection(" surprise. ");
      },
      Down: function(cm) {
        cm.replaceSelection(" surprise again! ");
      }
    };
  },
  // Controlled-input handler for the first editor instance.
  handleChange1(value) {
    this.setState({
      textValue1: value
    });
  },
  // Controlled-input handler for the second (unused) editor instance.
  handleChange2(value) {
    this.setState({
      textValue2: value
    });
  },
  // Demonstrates driving editor content from state; bumps a module counter.
  handleTextChange() {
    this.setState({
      textValue1: `Changing text by setting new state. ${counter++}`
    });
  },
  render() {
    // Only the custom Editor wrapper is rendered because the two libraries'
    // CSS class names clash (translated from the original Cantonese comment).
    return (
      <div>
        <Editor
          value={this.state.textValue1}
          handleEditorChange={this.handleChange1}
        />
      </div>
    )
  }
});<file_sep>import UserConstants from '../constants/UserConstants.jsx';
import BaseStore from './BaseStore.jsx';
// Flux store holding the auth token and the current user's detail payload.
class UserStore extends BaseStore {
  constructor() {
    super();
    // NOTE(review): the callback handed to subscribe() *returns* the bound
    // action handler rather than invoking it — this relies on BaseStore
    // calling the factory to obtain the handler; confirm against BaseStore.
    this.subscribe(() => this._registerToActions.bind(this));
    this._token = null;  // auth token, or null when logged out
    this._user = null;   // user detail payload, or null when not loaded
  }
  // Dispatcher entry point: route each action type to its mutator.
  _registerToActions(action) {
    switch(action.actionType) {
      case UserConstants.LOGIN_USER:
        this.setUserToken(action.token);
        break;
      case UserConstants.LOGOUT_USER:
        this.removeUserInfo();
        break;
      case UserConstants.LOAD_USER_DETAIL:
        this.setUserDetail(action.user);
        break;
      default:
        break;
    }
  }
  // Store the auth token and notify listeners.
  setUserToken(token) {
    this._token = token;
    this.emitChange();
  }
  // Store the user detail payload and notify listeners.
  setUserDetail(user) {
    this._user = user;
    this.emitChange();
  }
  // Clear token and user detail (logout) and notify listeners.
  removeUserInfo() {
    this._user = null;
    this._token = null;
    this.emitChange();
  }
  get user() {
    return this._user;
  }
  get token() {
    return this._token;
  }
  // True when a token is present locally (does not validate it server-side).
  isLoggedIn() {
    return !!this._token;
  }
}
export default new UserStore();<file_sep>import React, {PropTypes} from 'react';
import FloatingActionButton from 'material-ui/FloatingActionButton';
import ActionDone from 'react-material-icons/icons/action/done';
const FullTripConfirmButton = ({onFullTripConfirm}) => (
<div>
<FloatingActionButton mini={true} onClick={onFullTripConfirm}>
<ActionDone />
</FloatingActionButton>
</div>
);
FullTripConfirmButton.propTypes = {
onFullTripConfirm: PropTypes.func.isRequired,
};
export default FullTripConfirmButton;
<file_sep>import RaisedButton from 'material-ui/RaisedButton';
import React, {PropTypes} from 'react';
// encodeURIComponent(myUrl)
const FullTripUserSubmitButton = ({onFullTripUserSubmit}) => {
return (
<div>
<RaisedButton label="Save the Full Trip" primary={true} onClick={onFullTripUserSubmit}/>
</div>
)
}
FullTripUserSubmitButton.propTypes = {
onFullTripUserSubmit: PropTypes.func.isRequired,
};
export default FullTripUserSubmitButton<file_sep>import React from 'react';
// import MobileTearSheet from '../../../MobileTearSheet';
import {List, ListItem, makeSelectable} from 'material-ui/List';
import Avatar from 'material-ui/Avatar';
import {grey400} from 'material-ui/styles/colors';
import IconButton from 'material-ui/IconButton';
import MoreVertIcon from 'material-ui/svg-icons/navigation/more-vert';
import IconMenu from 'material-ui/IconMenu';
import MenuItem from 'material-ui/MenuItem';
import {Tabs, Tab} from 'material-ui/Tabs';
// From https://github.com/oliviertassinari/react-swipeable-views
import SwipeableViews from 'react-swipeable-views';
// Tabbed, swipeable per-day list of the events in a full trip.
// Day tabs are derived from the `day` field of fullTripDetails; each row
// offers a "Resuggest"/"Delete" menu wired to the parent callbacks.
export default class FullTripList extends React.Component {
  constructor(props) {
    super(props);
    // Report the first day's trip-location id to the parent on creation.
    this.props.getTapName(this.props.tripLocationIds[0])
    this.state = {
      slideIndex: 0,  // index of the currently selected day tab
    };
  }
  // Tab click / swipe: tell the parent which day is now active.
  handleChange = (value) => {
    this.props.getTapName(this.props.tripLocationIds[value]);
    this.setState({
      slideIndex: value,
    });
  }
  render() {
    const iconButtonElement = (
      <IconButton
        touch={true}
        tooltip="more"
        tooltipPosition="bottom-left"
      >
        <MoreVertIcon color={grey400} />
      </IconButton>
    );
    // Per-row overflow menu delegating to the parent's suggest/delete handlers.
    const rightIconMenu =(poiId,poiName,tripLocationId) => {
      return (
        <IconMenu iconButtonElement={iconButtonElement}>
          <MenuItem onClick={() => this.props.onSuggestEvent(poiId, poiName, tripLocationId)}>Resuggest</MenuItem>
          <MenuItem onClick={() => this.props.onDeleteEvent(poiId, poiName, tripLocationId)}>Delete</MenuItem>
        </IconMenu>
      )
    };
    let SelectableList = makeSelectable(List);
    var tabLis = [];      // <Tab> elements, one per day
    var fullWrap = [];    // fullWrap[i] = ListItems for day i
    var selectList = [];  // swipeable panes, one per day
    // Highest day index present in the itinerary (days are 0-based).
    var maxDays = Math.max.apply(Math, this.props.fullTripDetails.map(function(trip){ return trip.day; }));
    // console.log('full trip', maxDays, this.props.fullTripDetails)
    for (var i=0; i<maxDays+1; i++) {
      tabLis.push(
        <Tab
          key={this.props.tripLocationIds[i]}
          label={'Day '+(i+1).toString()}
          value={i} />
      );
      var fullDetails = [];
      for (var j=0; j<this.props.fullTripDetails.length; j++) {
        if (this.props.fullTripDetails[j].day === i){
          let id = this.props.fullTripDetails[j].id;
          let keyId = '';
          let primaryTextName = '';
          let secondaryTextaddress = '';
          let avatarUrl = '';
          // Prefer a pending resuggestion for this event, if one exists.
          if (this.props.updateSuggestEvent.hasOwnProperty(id)) {
            keyId = this.props.updateSuggestEvent[id].id;
            primaryTextName = decodeURI(this.props.updateSuggestEvent[id].name);
            secondaryTextaddress = this.props.updateSuggestEvent[id].address;
            // needs to update new img url:
            avatarUrl = this.props.updateSuggestEvent[id].icon_url
          } else {
            keyId = this.props.fullTripDetails[j].id;
            primaryTextName = decodeURI(this.props.fullTripDetails[j].name);
            secondaryTextaddress = this.props.fullTripDetails[j].address;
            avatarUrl = this.props.fullTripDetails[j].icon_url
          }
          fullDetails.push(
            <ListItem
              key={keyId}
              value={j}
              primaryText={primaryTextName}
              secondaryText={
                <p>
                  {secondaryTextaddress}
                </p>
              }
              secondaryTextLines={2}
              rightIconButton={rightIconMenu(this.props.fullTripDetails[j].id, this.props.fullTripDetails[j].name, this.props.tripLocationIds[i])}
              leftAvatar={<Avatar src={avatarUrl} />} />
          );
          fullWrap[i] = fullDetails;
        }
      }
      selectList.push(<div key={i}>
        <SelectableList defaultValue={3}>
          {fullWrap[i]}
        </SelectableList>
      </div>);
    };
    return (
      <div>
        <Tabs
          onChange={this.handleChange}
          value={this.state.slideIndex}
        >
          {tabLis}
        </Tabs>
        <SwipeableViews
          index={this.state.slideIndex}
          onChangeIndex={this.handleChange}
        >
          {selectList}
        </SwipeableViews>
      </div>
    );
  }
}
<file_sep># -*- coding: utf-8 -*-
from __future__ import unicode_literals
# from snippets.models import Snippet
# from snippets.serializers import SnippetSerializer
from rest_framework import generics, status
from django.contrib.auth.models import User
from angeltrip.serializers import UserSerializer, FullTripSearchSerializer, \
OutsideTripSearchSerializer,CityStateSearchSerializer, FullTripSuggestDeleteSerializer, \
FullTripAddSearchSerializer, FullTripAddEventSerializer, FullTripSuggestConfirmSerializer, \
IPGeoLocationSerializer, OutsideTripAddSearchSerializer
from rest_framework import permissions
from angeltrip.permissions import IsOwnerOrReadOnly, IsStaffOrTargetUser
from rest_framework.decorators import api_view, permission_classes
from rest_framework.response import Response
from rest_framework.reverse import reverse
from rest_framework import renderers
from rest_framework import viewsets
from rest_framework.decorators import detail_route
from rest_framework.views import APIView
from city_trip import get_fulltrip_data
from helpers import *
from outside_trip import outside_trip_poi
from outside_helpers import *
from rest_framework.permissions import AllowAny
# from django.contrib.auth import get_user_model # If used custom user model
from django.views.decorators.csrf import csrf_exempt
import trip_update
'''
Get Token:
http post http://127.0.0.1:8000/account/get_auth_token/ username=test password=<PASSWORD>
Get outside Trip:
http get 'http://127.0.0.1:8000/outside_trip_search/?city=San_Diego&state=California&direction=N&n_days=1'
'''
@api_view(['GET'])
def api_root(request, format=None):
    """Browsable-API entry point: link out to the list endpoints."""
    links = {
        'full-trips': reverse('full-trip-list', request=request, format=format),
        'users': reverse('user-list', request=request, format=format)
    }
    return Response(links)
@api_view(['POST'])
@permission_classes([AllowAny])
def create_auth(request):
    '''
    Register a new user account.

    http post http://127.0.0.1:8000/account/register password=... username=test3 email=''

    Returns 201 with the serialized user on success, otherwise 400 with the
    serializer's validation errors.
    '''
    # The original assigned ``permission_classes = [AllowAny]`` as a local
    # variable, which has no effect on a function-based view; the decorator
    # above is what actually opens this endpoint to anonymous users.
    serialized = UserSerializer(data=request.data)
    if serialized.is_valid():
        # create_user hashes the password (plain User() would store it raw).
        User.objects.create_user(
            email=serialized.data['email'], username=serialized.data['username'], password=serialized.data['password']
        )
        return Response(serialized.data, status=status.HTTP_201_CREATED)
    else:
        # .errors is the public accessor; the original used the private _errors.
        return Response(serialized.errors, status=status.HTTP_400_BAD_REQUEST)
class FullTripDetail(APIView):
    """Read-only endpoint returning a previously generated full trip."""

    def get(self, request, full_trip_id):
        """Return the stored trip, or 400 when the id is unknown."""
        if not check_full_trip_id(full_trip_id):
            error = '%s is not a valid full trip id.' % (full_trip_id)
            return Response({"error_trip_id": error}, status=400)
        trip_id, details, location_ids = get_exisiting_full_trip_details(full_trip_id)
        payload = {
            "full_trip_id": trip_id,
            "full_trip_details": details,
            "trip_location_ids": location_ids,
        }
        return Response(payload)
class OutsideTripDetail(APIView):
    """Read-only endpoint returning a previously generated outside trip."""

    def get(self, request, outside_trip_id):
        """Return the stored outside trip, or 400 when the id is unknown."""
        if not check_outside_trip_id(outside_trip_id):
            # NOTE(review): the message says "full trip id" although this is
            # the outside-trip endpoint — wording kept for behavior parity.
            error = '%s is not a valid full trip id.' % (outside_trip_id)
            return Response({"error_trip_id": error}, status=400)
        trip_id, details, location_ids = get_exisiting_outside_trip_details(outside_trip_id)
        payload = {
            "outside_trip_id": trip_id,
            "outside_trip_details": details,
            "trip_location_ids": location_ids,
        }
        return Response(payload)
class FullTripSearch(APIView):
    """Generate a multi-day in-city trip for a validated city/state pair."""

    def get_permissions(self):
        '''
        myurl = 'http://127.0.0.1:8000/full_trip_search/?state=California&city=San_Francisco&n_days=1'
        response = requests.get(myurl, headers={'Authorization': 'Token {}'.format(mytoken)})
        response.json()
        '''
        # return (permissions.IsAuthenticated()),
        # Returns a one-element tuple of permission instances: GET is open to
        # anonymous users, every other verb requires authentication.
        return (AllowAny() if self.request.method == 'GET'
                else permissions.IsAuthenticated()),

    def get(self, request):
        """Validate city/state/n_days and return the generated itinerary."""
        # Validate the incoming input (provided through query parameters)
        serializer = FullTripSearchSerializer(data=request.query_params)
        serializer.is_valid(raise_exception=True)
        # Get the model input
        data = serializer.validated_data
        city = data["city"]
        state = data["state"]
        n_days = data["n_days"]
        # state = abb_to_full_state(state)
        # check_state normalizes the state name and is falsy when unknown.
        checked_state = check_state(state)
        if not checked_state:
            return Response({
                "error_location": '%s is not a valid state name' % (state),
            }, status=400)
        valid_city = check_valid_city(city, checked_state)
        if not valid_city:
            return Response({
                "error_location": '%s is not valid city name for state %s' % (city, checked_state),
            }, status=400)
        full_trip_id, full_trip_details, trip_location_ids = get_fulltrip_data(state=checked_state, city=city, n_days=n_days)
        return Response({
            "full_trip_id": full_trip_id,
            "full_trip_details": full_trip_details,
            "trip_location_ids": trip_location_ids,
        })
class OutsideTripSearch(APIView):
    """Generate a day trip heading out of a city in a compass direction.

    Example:
        http get 'http://127.0.0.1:8000/outside_trip_search/?city=San_Diego&state=California&direction=N&n_days=1'
    """
    # NOTE: the commented-out get_permissions boilerplate was removed; this
    # endpoint is currently open to anonymous users.

    def get(self, request):
        """Validate inputs and return the outside-trip itinerary, or 400."""
        # Validate the incoming input (provided through query parameters)
        serializer = OutsideTripSearchSerializer(data=request.query_params)
        serializer.is_valid(raise_exception=True)
        # Get the model input
        data = serializer.validated_data
        # Query params arrive underscore-separated and case-insensitive.
        city = data["city"].replace('_',' ').title()
        state = data["state"].replace('_',' ').title()
        direction = data["direction"].upper()
        checked_state = check_state(state)
        if not checked_state:
            return Response({
                "error_location": '%s is not a valid state name' %(state),
            }, status=400)
        valid_city = check_valid_city(city, checked_state)
        if not valid_city:
            return Response({
                "error_location": '%s is not valid city name for state %s' %(city, checked_state),
            }, status=400)
        # print 'outsdie trip: ', city, state, direction
        # TODO: username_id is hard-coded to 1 — wire in request.user.
        outside_trip_id, outside_trip_details, outside_route_ids_list = outside_trip_poi(origin_city=city, origin_state=checked_state, target_direction=direction, full_day=True, regular=True, debug=True, username_id=1)
        if not outside_trip_details or not outside_route_ids_list:
            return Response({
                "error_no_poi": 'direction %s of %s has no interesting place to go, please choose another direction' % (direction ,city),
            }, status=400)
        return Response({
            "outside_trip_id": outside_trip_id,
            "outside_trip_details": outside_trip_details,
            "outside_route_ids_list": outside_route_ids_list
        })
class CityStateSearch(APIView):
    """Autocomplete helper: fuzzy-match a free-text "city, state" query."""

    def get(self, request):
        """Return parallel lists of matching cities, states and combined labels."""
        serializer = CityStateSearchSerializer(data=request.query_params)
        serializer.is_valid(raise_exception=True)
        query = serializer.validated_data["city_state"]
        matches = serach_city_state(query)
        # Each match is a tuple: (city, state, ..., combined label last).
        payload = {
            "city_state": [m[-1] for m in matches],
            "city": [m[0] for m in matches],
            "state": [m[1] for m in matches],
        }
        return Response(payload)
class FullTripDeleteEvent(APIView):
    """Remove one event from a day of the trip and rebuild the itinerary."""
    # NOTE: commented-out get_permissions boilerplate removed; endpoint is
    # currently open to anonymous users.

    def get(self, request):
        """Delete the given event and return the updated trip payload."""
        # Validate the incoming input (provided through query parameters)
        serializer = FullTripSuggestDeleteSerializer(data=request.query_params)
        serializer.is_valid(raise_exception=True)
        # Get the model input
        data = serializer.validated_data
        full_trip_id=data["full_trip_id"]
        event_id = data["event_id"]
        trip_location_id = data["trip_location_id"]
        # TODO: username_id is hard-coded to 1 — wire in request.user.
        username_id = 1
        new_full_trip_id, new_full_trip_details, new_trip_location_ids, current_trip_location_id = trip_update.remove_event(full_trip_id, trip_location_id, event_id, username_id)
        print 'trip details after delete event: ', new_full_trip_id, new_full_trip_details, new_trip_location_ids, current_trip_location_id
        return Response({
            "full_trip_id": new_full_trip_id,
            "full_trip_details": new_full_trip_details,
            "trip_location_ids": new_trip_location_ids,
            "current_trip_location_id": current_trip_location_id
        })
class FullTripAddSearch(APIView):
    """Search POIs by name so the user can add one to a day of the trip."""
    # NOTE: commented-out get_permissions boilerplate removed; endpoint is
    # currently open to anonymous users.

    def get(self, request):
        """Return candidate POIs matching the searched name."""
        # Validate the incoming input (provided through query parameters)
        serializer = FullTripAddSearchSerializer(data=request.query_params)
        serializer.is_valid(raise_exception=True)
        # Get the model input
        data = serializer.validated_data
        full_trip_id=data["full_trip_id"]  # NOTE(review): validated but unused here
        poi_name = data["poi_name"]
        trip_location_id = data["trip_location_id"]
        poi_dict, poi_names = trip_update.add_search_event(poi_name, trip_location_id)
        print 'welcome to add your search :)', poi_names, poi_dict
        return Response({
            "poi_dict": poi_dict,
            "poi_names": poi_names,
        })
class FullTripAddEvent(APIView):
    """Add a chosen POI to a day of the trip and rebuild the itinerary."""
    # NOTE: commented-out get_permissions boilerplate removed; endpoint is
    # currently open to anonymous users.

    def get(self, request):
        """Insert the POI into the day trip, then refresh the full trip."""
        # Validate the incoming input (provided through query parameters)
        serializer = FullTripAddEventSerializer(data=request.query_params)
        serializer.is_valid(raise_exception=True)
        # Get the model input
        data = serializer.validated_data
        full_trip_id=data["full_trip_id"]
        poi_name = data["poi_name"]
        # The client sends the literal string 'undefined' when no id exists.
        poi_id = data["poi_id"] if data["poi_id"] != 'undefined' else None
        trip_location_id = data["trip_location_id"]
        old_trip_location_id,new_trip_location_id, new_day_details = trip_update.add_event_day_trip(poi_id, poi_name, trip_location_id, full_trip_id)
        full_trip_id, trip_location_ids, full_trip_details = trip_update.add_event_full_trip(full_trip_id, old_trip_location_id, new_trip_location_id, new_day_details)
        print 'submit your add event :)', full_trip_id, trip_location_ids, full_trip_details
        return Response({
            "full_trip_details": full_trip_details,
            "full_trip_id": full_trip_id,
            "trip_location_ids": trip_location_ids,
            "current_trip_location_id": new_trip_location_id,
        })
class FullTripSuggestArray(APIView):
    """Return alternative POIs that could replace a given trip event."""
    # NOTE: commented-out get_permissions boilerplate removed; endpoint is
    # currently open to anonymous users.

    def get(self, request):
        """Return replacement suggestions, or 400 when none are found."""
        # Validate the incoming input (provided through query parameters)
        serializer = FullTripSuggestDeleteSerializer(data=request.query_params)
        serializer.is_valid(raise_exception=True)
        # Get the model input
        data = serializer.validated_data
        full_trip_id=data["full_trip_id"]
        event_id = data["event_id"]
        trip_location_id = data["trip_location_id"]
        # TODO: username_id is hard-coded to 1 — wire in request.user.
        username_id = 1
        suggest_event_array = trip_update.suggest_event_array(full_trip_id, trip_location_id, event_id, username_id)
        if not suggest_event_array:
            return Response({
                "error_no_suggestion": 'No other place in the near area as good as this place'
            }, status=400)
        return Response({
            "suggest_event_array": suggest_event_array,
        })
class FullTripSuggestConfirm(APIView):
    """Apply the user's accepted resuggestions to the stored trip."""
    # NOTE: commented-out get_permissions boilerplate removed; endpoint is
    # currently open to anonymous users.

    def post(self, request):
        """Swap accepted suggestions into the trip and return the new state."""
        # Request body is read directly; serializer validation was left
        # disabled in the original.
        # serializer = FullTripSuggestConfirmSerializer(data=request.data)
        # serializer.is_valid(raise_exception=True)
        data = request.data
        print data, 'bug??'
        full_trip_id=data["fullTripId"]
        update_suggest_event = data["updateSuggestEvent"]
        update_trip_location_id = data["updateTripLocationId"]
        print full_trip_id,update_trip_location_id
        print 'my boi: ', update_suggest_event
        # TODO: username_id is hard-coded to 1 — wire in request.user.
        username_id = 1
        new_full_trip_id, new_full_trip_details, full_trip_trip_locations_id, new_update_trip_location_id = trip_update.switch_suggest_event(full_trip_id, update_trip_location_id, update_suggest_event, username_id)
        return Response({
            "full_trip_id": new_full_trip_id,
            "full_trip_details": new_full_trip_details,
            "trip_location_ids": full_trip_trip_locations_id,
            "current_trip_location_id": new_update_trip_location_id,
        })
class FullTripCreate(APIView):
    """Persist a generated full trip under the authenticated user's account."""

    def get_permissions(self):
        '''
        response = requests.get(myurl, headers={'Authorization': 'Token {}'.format(mytoken)})
        '''
        # One-element tuple of permission instances: login always required.
        return (permissions.IsAuthenticated()),

    def post(self, request):
        """Attach the posted full trip id to the requesting user."""
        owner_id = User.objects.get(username=request.user.username).pk
        trip_id = request.data["fullTripId"]
        result = trip_update.create_full_trip(trip_id, owner_id)
        return Response({
            "response": result,
        })
class OutsideTripAddSearch(APIView):
    """POI name search for outside trips.

    NOTE(review): unfinished endpoint — the search result is computed but an
    empty payload is returned; the intended response is still commented out.
    """
    def get(self, request):
        # Validate the incoming input (provided through query parameters)
        serializer = OutsideTripAddSearchSerializer(data=request.query_params)
        serializer.is_valid(raise_exception=True)
        # Get the model input
        data = serializer.validated_data
        outside_trip_id = data["outside_trip_id"]  # NOTE(review): unused here
        poi_name = data["poi_name"]
        outside_route_id = data["outside_route_id"]
        # poi_dict, poi_names = trip_update.outside_add_search_event(poi_name, outside_route_id)
        a, b= trip_update.outside_add_search_event(poi_name, outside_route_id)
        return Response({
        })
        # print 'welcome to add your search :)', poi_names, poi_dict
        # return Response({
        #     "poi_dict": poi_dict,
        #     "poi_names": poi_names,
        # })
class OutsideTripAddEvent(APIView):
    """Add an event to an outside trip.

    NOTE(review): the body is copy-pasted from FullTripAddEvent — it reads
    full-trip parameters and calls the full-trip update helpers even though
    this is the outside-trip endpoint; confirm intended behavior.
    """
    def get(self, request):
        # Validate the incoming input (provided through query parameters)
        serializer = FullTripAddEventSerializer(data=request.query_params)
        serializer.is_valid(raise_exception=True)
        # Get the model input
        data = serializer.validated_data
        full_trip_id=data["full_trip_id"]
        poi_name = data["poi_name"]
        # The client sends the literal string 'undefined' when no id exists.
        poi_id = data["poi_id"] if data["poi_id"] != 'undefined' else None
        trip_location_id = data["trip_location_id"]
        old_trip_location_id,new_trip_location_id, new_day_details = trip_update.add_event_day_trip(poi_id, poi_name, trip_location_id, full_trip_id)
        full_trip_id, trip_location_ids, full_trip_details = trip_update.add_event_full_trip(full_trip_id, old_trip_location_id, new_trip_location_id, new_day_details)
        print 'submit your add event :)', full_trip_id, trip_location_ids, full_trip_details
        return Response({
            "full_trip_details": full_trip_details,
            "full_trip_id": full_trip_id,
            "trip_location_ids": trip_location_ids,
            "current_trip_location_id": new_trip_location_id,
        })
class IPGeoLocation(APIView):
    """Resolve an IP address to country/region/city via the geo lookup helper."""
    def get(self,request):
        # Validate the incoming input (provided through query parameters)
        print 'ok?'
        serializer = IPGeoLocationSerializer(data=request.query_params)
        serializer.is_valid(raise_exception=True)
        # Get the model input
        data = serializer.validated_data
        print 'ip data: ', data, data['ip']
        country_code, country_name, region_name, city_name = find_ip_geo_location(data['ip'])
        return Response({
            "country_code": country_code,
            "country_name": country_name,
            "region_name": region_name,
            "city_name": city_name
        })
class UserViewSet(viewsets.ReadOnlyModelViewSet):
    """
    Read-only user endpoint: automatically provides the `list` and `detail`
    (retrieve) actions for all users.
    """
    queryset = User.objects.all()
    serializer_class = UserSerializer
# class UserView(viewsets.ModelViewSet):
# serializer_class = UserSerializer
# model = User
# def get_permissions(self):
# # allow non-authenticated user to create via POST
# return (AllowAny() if self.request.method == 'POST'
# else IsStaffOrTargetUser()),
# @api_view(['POST'])
# @csrf_exempt
# class CreateUserView(generics.CreateAPIView):
# model = User
# permission_classes = [
# AllowAny # Or anon users can't register
# ]
# serializer_class = UserSerializer
<file_sep>import React, {Component} from 'react';
import { withGoogleMap, GoogleMap, DirectionsRenderer } from "react-google-maps";
export default class GoogleMapOutsideTrip extends Component {
  /**
   * Map rendering the driving route of an outside (out-of-town) day trip.
   * Side effect: publishes a shareable google.com/maps URL through
   * props.getMapUrl while building the route details.
   */
  constructor(props) {
    super(props);
    this.state = {
      directions: null,     // DirectionsResult once the route request succeeds
      center: null,         // NOTE(review): never written — confirm intended
      directionDetails: {}, // {center, origin, destination, waypts}
      mapUrl: '',           // NOTE(review): never written — url goes via props
    };
    this.getWaypts = this.getWaypts.bind(this)
    this.getDirections = this.getDirections.bind(this)
  }
  componentWillMount() {
    this.setState({directionDetails: this.getWaypts(this.props.outsideTripDetails)});
  }
  componentWillReceiveProps(nextProps) {
    // Rebuild route details only when a different itinerary arrives.
    if (nextProps.outsideTripDetails !== this.props.outsideTripDetails) {
      this.setState({directionDetails: this.getWaypts(nextProps.outsideTripDetails)});
    }
  }
  componentDidMount() {
    this.getDirections();
  }
  componentDidUpdate(prevProps, prevState) {
    // Re-route only when a different itinerary arrives.
    if (prevProps.outsideTripDetails !== this.props.outsideTripDetails) {
      this.getDirections();
    }
  }
  // shouldComponentUpdate(nextProps,nextState) {
  //   const differentDirectionDetails = nextState.directionDetails !== this.state.directionDetails;
  //   const differentOutsideTripDetails = nextProps.outsideTripDetails !== this.props.outsideTripDetails;
  //   console.log('different: ', differentDirectionDetails || differentOutsideTripDetails, this.state.directionDetails, nextState.directionDetails );
  //   return differentDirectionDetails || differentOutsideTripDetails;
  //   return true
  // }
  // Build {center, origin, destination, waypts} from the itinerary and
  // publish the shareable maps URL through props.getMapUrl (side effect!).
  getWaypts = function(outsideTripDetails) {
    let waypts = [];
    const tripLength = outsideTripDetails.length;
    const origin = this.props.origin_location;
    const originUrl = '&origin=' + encodeURIComponent(origin);
    let location = '';
    let destination = '';
    let mapWayptUrl = 'https://www.google.com/maps/dir/?api=1&travelmode=driving';
    let mapWaypts = [];
    let destUrl = '';
    for (let i = 0; i < tripLength; i++){
      // Rows flagged as lacking a full street address are geocoded by
      // "<name>, <city>, <state>" instead.
      if (outsideTripDetails[i].check_full_address === 0){
        location = outsideTripDetails[i].name + ', ' + outsideTripDetails[i].city + ', ' + outsideTripDetails[i].state
      } else {
        location = outsideTripDetails[i].address
      }
      // location = new window.google.maps.LatLng(outsideTripDetails[i].coord_lat, outsideTripDetails[i].coord_long)
      if(i === tripLength - 1) {
        // Last stop is the destination; everything before is a waypoint.
        destination = location;
        destUrl = '&destination='+ encodeURIComponent(destination);
      } else {
        waypts.push({location: location, stopover: true});
        mapWaypts.push(encodeURIComponent(location));
      }
    }
    const mapWayptsStr = mapWaypts.join('%7C');  // %7C is the '|' separator
    mapWayptUrl += originUrl + destUrl + '&waypoints=' + mapWayptsStr;
    this.props.getMapUrl(mapWayptUrl);
    // Center the map on the first stop's coordinates.
    const center = new window.google.maps.LatLng(outsideTripDetails[0].coord_lat, outsideTripDetails[0].coord_long)
    return {
      center: center,
      origin: origin,
      destination: destination,
      waypts: waypts
    };
  }
  // Request driving directions for the computed route; no-op until
  // directionDetails.origin is populated.
  getDirections() {
    // console.log('get directions')
    const _this = this;
    const DirectionsService = new window.google.maps.DirectionsService();
    if(this.state.directionDetails.origin){
      DirectionsService.route({
        origin: this.state.directionDetails.origin,
        destination: this.state.directionDetails.destination,
        travelMode: window.google.maps.TravelMode.DRIVING,
        waypoints: this.state.directionDetails.waypts,
        optimizeWaypoints: false,  // preserve the itinerary's stop order
      }, (result, status) => {
        if (status === window.google.maps.DirectionsStatus.OK) {
          _this.setState({
            directions: result,
          });
          console.log('reuslt: ', result)
        } else {
          console.error('error fetching directions', result);
        }
      });
    }
  }
  render() {
    console.log('map re-renderred')
    const _this = this;
    const DirectionsGoogleMap = withGoogleMap(props => (
      <GoogleMap
        defaultZoom={7}
        defaultCenter={_this.state.directionDetails.center}
      >
        {this.state.directions && <DirectionsRenderer directions={this.state.directions} />}
      </GoogleMap>
    ));
    return (
      <DirectionsGoogleMap
        containerElement={
          <div style={{ height: `100%` }} />
        }
        mapElement={
          <div style={{ height: `100%` }} />
        }
      />
    );
  }
}<file_sep>import React from 'react';
import Auth from '../services/AuthService.jsx'
// import {Link} from 'react-router';
import UserActions from '../actions/UserActions.jsx';
import UserStore from '../stores/UserStore.jsx';
import UserConstants from '../constants/UserConstants';
import axios from 'axios';
class UserDetailPage extends React.Component {
constructor() {
super();
this.state = {
userData: {}
};
this.fetchMessage = this.fetchMessage.bind(this);
// need .bind(this), otherwise this will be UserStore
// this.loadUserDetail = this.loadUserDetail.bind(this);
this.tableStyles = {
fontSize: "14px"
}
}
fetchMessage() {
const config = {
headers: { Authorization: 'Token ' + localStorage.getItem('user_token')}
};
axios.get(UserConstants.USER_DETAIL_URL, config)
.then(response => {
/* console.log(response);*/
this.setState({
userData: response.data
});
console.log("userdata: ", this.state.userData)
});
}
componentWillMount() {
this.fetchMessage();
}
// componentWillReceiveProps(nextProps) {
// }
// componentDidMount() {
// }
// componentDidUpdate(prevProps, prevState) {
// }
// shouldComponentUpdate(nextProps,nextState) {
// }
render() {
console.log('detail render', this.state.user, UserStore.token, localStorage)
return (
<div className="container jumbotron">
<h2>User Detail</h2>
</div>
)
}
}
export default UserDetailPage;<file_sep>import React from 'react';
import { Card, CardTitle, CardActions } from 'material-ui/Card';
import SearchInputField from '../components/SearchInputField.jsx';
import MenuItemDays from '../components/MenuItemDays.jsx';
import FullTripSearchButton from '../components/FullTripSearchButton.jsx';
import FullTripList from '../components/FullTripList.jsx';
import FullTripAddEventButton from '../components/FullTripAddEventButton.jsx';
import FullTripResetButton from '../components/FullTripResetButton.jsx';
import FullTripConfirmButton from '../components/FullTripConfirmButton.jsx';
import DirectionsTrip from '../components/GoogleMapComponent.jsx';
import ReactMaterialUiNotifications from 'react-materialui-notifications/lib/ReactMaterialUiNotifications'
// import FullDirectionsTrip from '../components/GoogleMapFullTripComponent.jsx';
import GoogleMapUrlButton from '../components/GoogleMapUrlButton.jsx';
import FullTripUserSubmitButton from '../components/FullTripUserSubmitButton.jsx';
import UserStore from '../stores/UserStore.jsx';
import TripConstants from '../constants/TripConstants';
import $ from 'jquery';
// Version B: Delete method showed in front end only, dont update the backend until final click. Beter for performance!
// add_search event use local search instead of calling backend for updates.!
// alot to updates...>__<
// Version C: update backend for the add event order or use front end to do so
// Bug to be fixed: full trip list disappear when prev state with trip_days >1, tab on day 2 or larger
// and changes trip_days lower to 1.
const divStyle = {
width: '100%',
height: '400px',
};
class HomePage extends React.Component {
/**
* Class constructor.
*/
constructor(props, context) {
super(props, context);
// set the initial component state
this.state = {
place: "",
days: "",
cityStateDataSource: [],
addEventDataSource: [],
poiDict: {},
searchInputValue: '',
searchEventValue: '',
daysValue: '1',
fullTripDetails: [],
fullTripId: '',
tripLocationIds: [],
cloneFullTripDetails: [],
updateEventId: '',
updateEventName: '',
updateTripLocationId: '',
suggestEventArr: {},
updateSuggestEvent: {},
currentMapUrl: '',
newFullTrip: '',
errorsCityState: {},
errors: {},
};
this.performSearch = this.performSearch.bind(this)
this.onUpdateInput = this.onUpdateInput.bind(this)
this.handleDaysOnChange = this.handleDaysOnChange.bind(this)
this.onFullTripSubmit = this.onFullTripSubmit.bind(this)
this.onFullTripUserSubmit = this.onFullTripUserSubmit.bind(this)
this.onDeleteEvent = this.onDeleteEvent.bind(this)
this.onSuggestEvent = this.onSuggestEvent.bind(this)
this.onFullTripReset = this.onFullTripReset.bind(this)
this.onFullTripConfirm = this.onFullTripConfirm.bind(this)
this.performDeleteEventId = this.performDeleteEventId.bind(this)
this.performSuggestEventLst = this.performSuggestEventLst.bind(this)
this.onAddEventInput = this.onAddEventInput.bind(this)
this.getTapName = this.getTapName.bind(this)
this.getMapUrl = this.getMapUrl.bind(this)
this.onAddEventSubmit = this.onAddEventSubmit.bind(this)
}
performSearch() {
// const dbLocationURI = 'http://127.0.0.1:8000/city_state_search/?city_state=';
const _this = this;
const valid_input = encodeURIComponent(_this.state.searchInputValue);
const cityStateSearchUrl = TripConstants.SEARCH_CITY_STATE_URL + valid_input;
if(_this.state.searchInputValue !== '') {
console.log(cityStateSearchUrl);
$.ajax({
type: "GET",
url: cityStateSearchUrl,
}).done(function(res) {
_this.setState({
cityStateDataSource : res.city_state,
});
});
};
}
onUpdateInput(searchInputValue) {
this.setState({
searchInputValue,
},function(){
this.performSearch();
});
}
handleDaysOnChange = (event, index, value) => this.setState({ daysValue: event.target.innerText});
onFullTripSubmit = () => {
// const dbLocationURI = 'http://127.0.0.1:8000/full_trip_search/?';
const _this = this;
const city = this.state.searchInputValue.split(',')[0];
const state = this.state.searchInputValue.split(',')[1];
const fullTripSearchUrl = TripConstants.SEARCH_FULL_TRIP_URL + 'city=' + encodeURIComponent(city) + '&state='+ encodeURIComponent(state) + '&n_days='+ _this.state.daysValue;
console.log('fulltrip url: ', fullTripSearchUrl)
if(_this.state.searchInputValue !== '') {
$.ajax({
type: "GET",
url: fullTripSearchUrl,
}).done(function(res) {
_this.setState({
fullTripDetails : res.full_trip_details,
fullTripId: res.full_trip_id,
tripLocationIds: res.trip_location_ids,
updateTripLocationId: res.trip_location_ids[0],
addEventDataSource: [],
poiDict: {},
searchEventValue: '',
errorsCityState: {},
});
}).fail(function(res, status, errorThrown){
_this.setState({
errorsCityState: JSON.parse(res.responseText)
});
});
};
}
//may want to reset!
performDeleteEventId() {
const { fullTripId, updateEventId, updateTripLocationId } = this.state;
// const myUrl = 'http://127.0.0.1:8000/update_trip/delete/?full_trip_id=' + fullTripId +
// '&event_id=' + updateEventId +
// '&trip_location_id='+ updateTripLocationId;
const updateFullTripDeletePoiUrl = TripConstants.UPDATE_FULL_TRIP_DELETE_POI_URL + fullTripId + '&event_id=' + updateEventId + '&trip_location_id='+ updateTripLocationId;
const _this = this;
console.log('delete event id: ',updateFullTripDeletePoiUrl)
if(updateEventId !== '') {
console.log('delete event id: ',updateFullTripDeletePoiUrl);
$.ajax({
type: "GET",
url: updateFullTripDeletePoiUrl,
}).done(function(res) {
_this.setState({
fullTripDetails : res.full_trip_details,
fullTripId: res.full_trip_id,
tripLocationIds: res.trip_location_ids,
updateTripLocationId: res.current_trip_location_id,
updateEventId: '',
});
});
};
}
onDeleteEvent(updateEventId, updateEventName, updateTripLocationId) {
this.setState({
updateEventId,
updateEventName,
updateTripLocationId
},this.performDeleteEventId);
}
onSuggestEvent(updateEventId, updateEventName, updateTripLocationId) {
if (this.state.suggestEventArr.hasOwnProperty(updateEventId)) {
const suggestEventArrLength = Object.keys(this.state.suggestEventArr).length
const randomSuggestEventArrIdx = Math.floor(Math.random()*suggestEventArrLength)
const suggestEvent = this.state.suggestEventArr[randomSuggestEventArrIdx];
const updateSuggestEvent = Object.assign({}, this.state.updateSuggestEvent, {[this.state.updateEventId]:suggestEvent});
const errors = {};
this.setState({
updateEventId,
// updateTripLocationId: updateTripLocationId,
updateEventName,
updateSuggestEvent,
errors,
});
} else {
this.setState({
updateEventId,
updateEventName,
// updateTripLocationId
}, this.performSuggestEventLst);
}
}
performSuggestEventLst(){
// const myUrl = 'http://127.0.0.1:8000/update_trip/suggest_search/?full_trip_id=' + this.state.fullTripId +
// '&event_id=' + this.state.updateEventId +
// '&trip_location_id='+this.state.updateTripLocationId;
const updateFullTripSuggestPoiUrl = TripConstants.UPDATE_FULL_TRIP_SUGGEST_POI_URL + this.state.fullTripId + '&event_id=' + this.state.updateEventId + '&trip_location_id='+this.state.updateTripLocationId;
const _this = this;
console.log('suggest poi url: ', updateFullTripSuggestPoiUrl)
if(_this.state.updateEventId !== '') {
$.ajax({
type: "GET",
url: updateFullTripSuggestPoiUrl,
}).done(function(res) {
let suggestEventArr = Object.assign({}, _this.state.suggestEventArr[_this.state.updateEventId], res.suggest_event_array);
let suggestEvent = suggestEventArr[Math.floor(Math.random()*Object.keys(suggestEventArr).length)];
let updateSuggestEvent = Object.assign({}, _this.state.updateSuggestEvent, {[_this.state.updateEventId]:suggestEvent});
_this.setState({
suggestEventArr: suggestEventArr,
updateSuggestEvent: updateSuggestEvent,
});
}).fail(function(res, status, errorThrown){
const errors = JSON.parse(res.responseText);
ReactMaterialUiNotifications.showNotification({
title: _this.state.updateEventName,
additionalText: errors.error_no_suggestion,
});
_this.setState({
errors: errors
});
});
};
}
onFullTripReset(){
this.setState({
updateSuggestEvent: {}
})
}
onFullTripConfirm(){
// const suggestConfirmUrl = 'http://127.0.0.1:8000/update_trip/suggest_confirm/';
const suggestConfirmUrl = TripConstants.UPDATE_FULL_TRIP_SUGGEST_CONFIRM_URL;
const _this = this;
let data = {
updateSuggestEvent: JSON.stringify(this.state.updateSuggestEvent),
fullTripId: this.state.fullTripId,
updateTripLocationId: this.state.updateTripLocationId,
};
$.ajax({
type: 'POST',
url: suggestConfirmUrl,
data: data
})
.done(function(res) {
_this.setState({
updateSuggestEvent: '',
fullTripDetails: res.full_trip_details,
fullTripId: res.full_trip_id,
tripLocationIds: res.trip_location_ids,
updateEventId: '',
updateTripLocationId: res.current_trip_location_id,
})
})
.fail(function(jqXhr) {
console.log('failed to register');
});
}
performAddEventSearch() {
// const dbLocationURI = 'http://127.0.0.1:8000/update_trip/add_search/?poi_name=';
const _this = this;
const validInput = encodeURIComponent(this.state.searchEventValue);
const addPoiSearchUrl = TripConstants.UPDATE_FULL_TRIP_ADD_POI_SEARCH_URL + validInput + '&trip_location_id=' + _this.state.updateTripLocationId + '&full_trip_id=' + _this.state.fullTripId;
if(_this.state.searchEventValue !== '') {
console.log('add search url: ', addPoiSearchUrl);
$.ajax({
type: "GET",
url: addPoiSearchUrl,
}).done(function(res) {
_this.setState({
addEventDataSource : res.poi_names,
poiDict: res.poi_dict,
});
});
};
}
onAddEventInput(searchEventValue) {
this.setState({
searchEventValue,
},function(){
this.performAddEventSearch();
});
}
getTapName(updateTripLocationId) {
this.setState({
updateTripLocationId: updateTripLocationId,
addEventDataSource: [],
searchEventValue: '',
});
}
getMapUrl(currentMapUrl) {
console.log('the currentMapUrl: ',currentMapUrl)
this.setState({
currentMapUrl
})
}
onAddEventSubmit = () => {
// const dbLocationURI = 'http://127.0.0.1:8000/update_trip/add/?';
const _this = this;
const poiId = this.state.poiDict[this.state.searchEventValue];
const validPoiName = encodeURIComponent(this.state.searchEventValue);
const addPoiUrl = TripConstants.UPDATE_FULL_TRIP_ADD_POI_URL + 'poi_id=' + poiId + '&poi_name='+ validPoiName +'&full_trip_id='+ this.state.fullTripId + '&trip_location_id='+this.state.updateTripLocationId;
console.log('add submit',addPoiUrl)
if(this.state.searchEventValue !== '') {
$.ajax({
type: "GET",
url: addPoiUrl,
}).done(function(res) {
_this.setState({
fullTripDetails : res.full_trip_details,
fullTripId: res.full_trip_id,
tripLocationIds: res.trip_location_ids,
updateTripLocationId: res.current_trip_location_id,
addEventDataSource: [],
searchEventValue: '',
});
// call a func: map fulltrip detail to clone => cloneFullTripDetails =
});
};
}
// Wrap all `react-google-maps` components with `withGoogleMap` HOC
// and name it GettingStartedGoogleMap
onFullTripUserSubmit = () => {
// const fullTripUrl = 'http://localhost:8000/create_full_trip/';
const fullTripUrl = TripConstants.CREATE_FULL_TRIP_URL
const token = localStorage.getItem('user_token')
// const headers = {
// 'Authorization': 'Token ' + UserStore.token
// }
const headers = {
// 'Authorization': 'Token ' + localStorage.user_token
'Authorization': 'Token ' + token
}
const _this = this;
console.log('headers: ', headers, token)
let data = {
fullTripId: _this.state.fullTripId,
};
// data = JSON.stringify(data)
$.ajax({
type: 'POST',
url: fullTripUrl,
data: data,
headers: headers,
})
.done(function(res) {
_this.setState({
updateSuggestEvent: '',
updateEventId: '',
newFullTrip: res.response,
})
console.log('done creating the full trip!')
})
.fail(function(jqXhr) {
console.log('failed to create the full trip.');
});
}
render() {
return (
<div>
<ReactMaterialUiNotifications
desktop={true}
transitionName={{
leave: 'dummy',
leaveActive: 'fadeOut',
appear: 'dummy',
appearActive: 'zoomInUp'
}}
transitionAppear={true}
transitionLeave={true}
autoHide={3000}
/>
<Card className="container" >
<CardTitle title="Angel Trip!" subtitle="This is the home page." />
<CardActions>
<div className="col-md-8 col-md-offset-2">
<div className="col-md-5">
<SearchInputField
name='searchCityState'
searchText={this.state.searchInputValue}
floatingLabelText='Location'
dataSource={this.state.cityStateDataSource}
onUpdateInput={this.onUpdateInput}
errors={this.state.errorsCityState} />
</div>
<div className="col-md-5">
<MenuItemDays daysValue={this.state.daysValue} handleDaysOnChange={this.handleDaysOnChange}/>
</div>
<div className="col-md-2">
<FullTripSearchButton onFullTripSubmit={this.onFullTripSubmit}/>
</div>
<br/>
<div className="col-md-12 ">
{this.state.fullTripDetails.length>0 &&
<FullTripList
onDeleteEvent={this.onDeleteEvent}
onSuggestEvent={this.onSuggestEvent}
updateSuggestEvent={this.state.updateSuggestEvent}
fullTripDetails={this.state.fullTripDetails}
tripLocationIds={this.state.tripLocationIds}
getTapName={this.getTapName}
/>}
</div>
<div className="col-md-10 col-md-offset-2">
<div className="col-md-5 col-md-offset-1">
{this.state.fullTripDetails.length>0 &&
<SearchInputField
name='searchAddEvent'
searchText={this.state.searchEventValue}
hintText='Add New Event'
inputStyle={{ textAlign: 'center' }}
dataSource={this.state.addEventDataSource}
onUpdateInput={this.onAddEventInput} />}
</div>
<div className="col-md-2">
{this.state.fullTripDetails.length>0 &&
<FullTripAddEventButton onAddEventSubmit={this.onAddEventSubmit}/>}
</div>
<div className="col-md-4">
<div className="col-md-4">
{Object.keys(this.state.updateSuggestEvent).length>0 &&
<FullTripResetButton onFullTripReset={this.onFullTripReset}/>}
</div>
<div className="col-md-4">
{Object.keys(this.state.updateSuggestEvent).length>0 &&
<FullTripConfirmButton onFullTripConfirm={this.onFullTripConfirm}/>}
</div>
</div>
</div>
<div className="col-md-12">
<div style={divStyle}>
{this.state.fullTripDetails.length > 0 && <DirectionsTrip fullTripDetails={this.state.fullTripDetails}
updateTripLocationId={this.state.updateTripLocationId}
tripLocationIds={this.state.tripLocationIds}
getMapUrl={this.getMapUrl} />}
</div>
<br />
<div className="col-md-6">
{this.state.currentMapUrl.length >0 && <GoogleMapUrlButton googleMapUrl={this.state.currentMapUrl} />}
</div>
<div className="col-md-6">
{this.state.currentMapUrl.length >0 && <FullTripUserSubmitButton onFullTripUserSubmit={this.onFullTripUserSubmit} />}
</div>
</div>
</div>
</CardActions>
</Card>
</div>
)
}
};
export default HomePage;
<file_sep>var BASE_URL = 'http://174.129.85.123/';
// var BASE_URL = 'http://localhost:8000/';
var UserConstants = {
BASE_URL: BASE_URL,
LOGIN_URL: BASE_URL + 'rest-auth/login/',
SIGNUP_URL: BASE_URL + 'rest-auth/registration/',
USER_DETAIL_URL: BASE_URL + 'rest-auth/user/',
PASSWORD_CHANGE_URL: BASE_URL + 'rest-auth/password/change/',
PASSWORD_RESET_URL: BASE_URL + 'rest-auth/password/reset/',
LOGIN_USER: 'LOGIN_USER',
LOGOUT_USER: 'LOGOUT_USER',
LOAD_USER_DETAIL: 'LOAD_USER_DETAIL'
};
export default UserConstants;<file_sep>import React from 'react';
// import MobileTearSheet from '../../../MobileTearSheet';
import {List, ListItem, makeSelectable} from 'material-ui/List';
import Subheader from 'material-ui/Subheader';
import AppBar from 'material-ui/AppBar';
import Avatar from 'material-ui/Avatar';
import {grey400} from 'material-ui/styles/colors';
import IconButton from 'material-ui/IconButton';
import MoreVertIcon from 'material-ui/svg-icons/navigation/more-vert';
import IconMenu from 'material-ui/IconMenu';
import MenuItem from 'material-ui/MenuItem';
import NavigationArrowUpward from 'material-ui/svg-icons/navigation/arrow-upward.js';
import {Tabs, Tab} from 'material-ui/Tabs';
// From https://github.com/oliviertassinari/react-swipeable-views
import SwipeableViews from 'react-swipeable-views';
function handleTouchTap() {
alert('onTouchTap triggered on the title component');
}
export default class OutsideTripList extends React.Component {
constructor(props) {
super(props);
}
render() {
let fullDetails = [];
let primaryTextName = '';
let secondaryTextaddress = '';
let keyId = '';
let imgUrl = '';
for(var i=0; i<this.props.outsideRouteDetails.length; i++) {
primaryTextName = this.props.outsideRouteDetails[i].name;
secondaryTextaddress = this.props.outsideRouteDetails[i].address;
keyId = this.props.outsideRouteDetails[i].id;
imgUrl = this.props.outsideRouteDetails[i].icon_url;
fullDetails.push(
<ListItem
key={keyId}
value={i}
primaryText={primaryTextName}
secondaryText={
<p>
{secondaryTextaddress}
</p>
}
secondaryTextLines={2}
leftAvatar={<Avatar src={imgUrl} />} />
);
}
return (
<div style={{marginTop:'20px'}}>
<AppBar
title={this.props.outsideRouteTitle}
onTitleTouchTap={handleTouchTap}
iconElementLeft={<IconButton><NavigationArrowUpward /></IconButton>}
onLeftIconButtonTouchTap={this.props.toOutsideTrip}
/>
<List>
{fullDetails}
</List>
</div>
);
}
}
<file_sep>## AngelTrip
#### Tried to search around for place to go? Here is the solution -- AngelTrip
#### We plan your trip for you wherever you want to go.
<file_sep>// var BASE_URL = 'http://localhost:8000/';
var BASE_URL = 'http://174.129.85.123/';
var TripConstants = {
BASE_URL: BASE_URL,
SEARCH_CITY_STATE_URL: BASE_URL + 'city_state_search/?city_state=',
SEARCH_FULL_TRIP_URL: BASE_URL + 'full_trip_search/?',
SEARCH_OUTSIDE_TRIP_URL: BASE_URL + 'outside_trip_search/?',
UPDATE_FULL_TRIP_DELETE_POI_URL: BASE_URL + 'update_trip/delete/?full_trip_id=',
UPDATE_FULL_TRIP_SUGGEST_POI_URL: BASE_URL + 'update_trip/suggest_search/?full_trip_id=',
UPDATE_FULL_TRIP_SUGGEST_CONFIRM_URL: BASE_URL + 'update_trip/suggest_confirm/',
UPDATE_FULL_TRIP_ADD_POI_SEARCH_URL: BASE_URL + 'update_trip/add_search/?poi_name=',
UPDATE_FULL_TRIP_ADD_POI_URL: BASE_URL + 'update_trip/add/?',
CREATE_FULL_TRIP_URL: BASE_URL + 'create_full_trip/',
CREATE_OUTSIDE_TRIP_URL: BASE_URL + 'create_outside_trip/',
IP_LOCATION_API: '359263af1b8a0a7c1d725ec86751962cc8801f6a',
IP_LOCATION_URL: BASE_URL + 'iplocation/?ip=',
FULL_TRIP_URL: BASE_URL + 'full_trip/',
OUTSIDE_TRIP_URL: BASE_URL + 'outside_trip/'
};
export default TripConstants;<file_sep>beautifulsoup4==4.6.0
bs4==0.0.1
click==6.7
config==0.3.9
coreapi==2.3.0
coreapi-cli==1.0.6
coreschema==0.0.4
Django==1.11.23
django-allauth==0.32.0
django-cors-headers==2.0.2
django-filter==1.0.2
django-rest-auth==0.9.1
django-webpack-loader==0.4.1
djangorestframework==3.9.1
geoip2==2.5.0
geopy==1.11.0
httpie==0.9.9
ipaddress==1.0.18
itypes==1.1.0
Jinja2==2.9.6
Markdown==2.6.8
MarkupSafe==1.0
maxminddb==1.3.0
numpy==1.12.1
oauthlib==2.0.2
olefile==0.44
pandas==0.19.2
Pillow==4.1.1
psycopg2==2.7.1
Pygments==2.2.0
python-dateutil==2.6.0
python-openid==2.2.5
pytz==2017.2
requests==2.20.0
requests-oauthlib==0.8.0
scikit-learn==0.18.1
scipy==0.19.0
simplejson==3.10.0
six==1.10.0
sklearn==0.0
SQLAlchemy==1.1.9
uritemplate==3.0.0
<file_sep>from rest_framework import serializers
from angeltrip.models import AuthUser, FullTripTable
from django.contrib.auth.models import User
from django.contrib.auth import get_user_model
from rest_auth.serializers import UserDetailsSerializer
class FullTripSearchSerializer(serializers.Serializer):
city = serializers.CharField()
state = serializers.CharField()
n_days = serializers.CharField()
class OutsideTripSearchSerializer(serializers.Serializer):
city = serializers.CharField()
state = serializers.CharField()
direction = serializers.CharField()
class CityStateSearchSerializer(serializers.Serializer):
city_state = serializers.CharField()
class FullTripSuggestDeleteSerializer(serializers.Serializer):
full_trip_id = serializers.CharField()
event_id = serializers.CharField()
trip_location_id = serializers.CharField()
class FullTripSuggestConfirmSerializer(serializers.Serializer):
full_trip_id = serializers.CharField()
event_id = serializers.CharField()
trip_location_id = serializers.CharField()
class FullTripAddSearchSerializer(serializers.Serializer):
full_trip_id = serializers.CharField()
poi_name = serializers.CharField(allow_blank=True)
trip_location_id = serializers.CharField()
class FullTripAddEventSerializer(serializers.Serializer):
poi_id = serializers.CharField(allow_blank=True)
poi_name = serializers.CharField(allow_blank=True)
trip_location_id = serializers.CharField()
full_trip_id = serializers.CharField()
class OutsideTripAddSearchSerializer(serializers.Serializer):
outside_trip_id = serializers.CharField()
poi_name = serializers.CharField(allow_blank=True)
outside_route_id = serializers.CharField()
class IPGeoLocationSerializer(serializers.Serializer):
ip = serializers.CharField()
# class UserSerializer(serializers.ModelSerializer):
# snippets = serializers.PrimaryKeyRelatedField(many=True, queryset=FullTripTable.objects.all())
# class Meta:
# model = AuthUser
# fields = ('id', 'username', 'snippets')
# class UserSerializer(serializers.ModelSerializer):
# class Meta:
# model = get_user_model()
# fields = ('password', 'username', 'email',)
# write_only_fields = ('<PASSWORD>',)
# read_only_fields = ('is_staff', 'is_superuser', 'is_active', 'date_joined',)
class FullTripTableSerializer(serializers.ModelSerializer):
class Meta:
model = FullTripTable
fields = ('index','full_trip_id', 'details')
class UserSerializer(UserDetailsSerializer):
# full_trips = FullTripTableSerializer()
full_trips = serializers.SlugRelatedField(many=True,read_only=True,slug_field='full_trip_id')
outside_trips = serializers.SlugRelatedField(many=True,read_only=True,slug_field='outside_trip_id')
# full_trip_id = serializers.CharField(source='full_trips')
# outside_trips = serializers.PrimaryKeyRelatedField(many=True,read_only=True)
# snippets = serializers.PrimaryKeyRelatedField(many=True, queryset=FullTripTable.objects.all())
class Meta:
model = User
fields = UserDetailsSerializer.Meta.fields + ('full_trips','outside_trips')
write_only_fields = ('password',)
read_only_fields = ('id',)
| c42bbc0b1d01fca548a368207a80f24f93ab5000 | [
"JavaScript",
"Python",
"Text",
"Markdown"
] | 25 | Python | gon1213/AngelTrip | 7c9171db21145d4fdd5fc27a2960ce736f85d9b8 | 2d057c5de33d8e1015b5db15f43287f60aeb5ced |
refs/heads/master | <repo_name>Pulko/pulko.github.io<file_sep>/precache-manifest.d7aebc6344ed151f790608cc2fa1f594.js
self.__precacheManifest = (self.__precacheManifest || []).concat([
{
"revision": "fd7642efa417e037f9d2c4ddbd74c1dc",
"url": "/index.html"
},
{
"revision": "add3f5a867a38ce33bec",
"url": "/static/css/main.34de6062.chunk.css"
},
{
"revision": "1ad9e37da60d4b061df3",
"url": "/static/js/2.832d989b.chunk.js"
},
{
"revision": "add3f5a867a38ce33bec",
"url": "/static/js/main.16212de6.chunk.js"
},
{
"revision": "42ac5946195a7306e2a5",
"url": "/static/js/runtime~main.a8a9905a.js"
}
]); | a7f3e4b1c3752e7d611b969b225f108f84efeb8f | [
"JavaScript"
] | 1 | JavaScript | Pulko/pulko.github.io | b054d08bbc753955812d15a072b9003d32600a86 | 3635ee0cc98ec3b5bccb7e8e74fb0e7fd0a3ee42 |
refs/heads/git-svn | <file_sep><?php
/**
* Bliki extension - Adds "Bliki" (blog in a wiki) functionality
*
* See http://www.organicdesign.co.nz/bliki for more detail
*
* @package MediaWiki
* @subpackage Extensions
* @author [http://www.organicdesign.co.nz/nad Nad]
* @copyright © 2013 [http://www.organicdesign.co.nz/nad Nad]
* @licence GNU General Public Licence 2.0 or later
*/
if( !defined( 'MEDIAWIKI' ) ) die( 'Not an entry point.' );
define( 'BLIKI_VERSION','2.0.8, 2014-10-12' );
$wgBlikiAddBodyClass = false;
$wgBlikiPostGroup = 'sysop';
$wgBlikiDefaultCat = 'Blog items';
$wgExtensionCredits['other'][] = array(
'path' => __FILE__,
'name' => 'Bliki',
'author' => '[http://www.organicdesign.co.nz/User:<NAME>]',
'url' => 'http://www.organicdesign.co.nz/bliki',
'description' => 'Adds [[Bliki]] (blog in a wiki) functionality',
'version' => BLIKI_VERSION
);
$dir = dirname( __FILE__ );
$wgExtensionMessagesFiles['Bliki'] = $wgExtensionMessagesFiles['BlikiFeed'] = "$dir/Bliki.i18n.php";
include( "$dir/BlikiFeed.php" );
class Bliki {
function __construct() {
global $wgHooks, $wgBlikiAddBodyClass;
$wgHooks['UnknownAction'][] = $this;
if( $wgBlikiAddBodyClass ) $wgHooks['OutputPageBodyAttributes'][] = $this;
}
function onUnknownAction( $action, $article ) {
global $wgOut, $wgRequest, $wgUser, $wgParser, $wgBlikiPostGroup;
if( $action == 'blog' && in_array( $wgBlikiPostGroup, $wgUser->getEffectiveGroups() ) ) {
$newtitle = $wgRequest->getText( 'newtitle' );
$title = Title::newFromText( $newtitle );
$error = false;
if( !is_object( $title ) ) {
$wgOut->addWikitext( '<div class="previewnote">Error: Bad title!</div>' );
$error = true;
}
elseif( $title->exists() ) {
$wgOut->addWikitext( '<div class="previewnote">Error: Title already exists!</div>' );
$error = true;
}
if( !$error ) {
$summary = $wgRequest->getText( 'summary' );
$content = $wgRequest->getText( 'content' );
$user = $wgUser->getName();
$date = date('U');
$sig = '<div class="blog-sig">{{BlogSig|' . "$user|@$date" . '}}</div>';
$type = $wgRequest->getText( 'type' );
switch( $type ) {
// Preview the item
case "Full preview":
$wikitext = "$sig\n$summary\n\n$content";
self::preview( $type, $title, $wikitext );
$article->view();
break;
// Preview the item in news/blog format
case "Summary preview":
$wikitext = "{|class=\"blog\"\n|\n== [[Post a blog item|$newtitle]] ==\n|-\n!$sig\n|-\n|$summary\n|}__NOEDITSECTION__";
$title = Title::newFromText( 'Blog' );
self::preview( $type, $title, $wikitext );
$article->view();
break;
// Create the item with tags as category links
case "Post":
$wikitext = '{{' . "Blog|1=$summary|2=$content" . '}}';
$wikitext .= "<noinclude>[[Category:Blog items]][[Category:Posts by $user]]";
foreach( array_keys( $_POST ) as $k ) {
if( preg_match( "|^tag(.+)$|", $k, $m ) ) {
$wikitext .= '[[Category:' . str_replace( '_', ' ', $m[1] ) . ']]';
}
}
$wikitext .= "</noinclude>";
$article = new Article( $title );
$article->doEdit( $wikitext, 'Blog item created via post form', EDIT_NEW );
$wgOut->redirect( $title->getFullURL() );
break;
}
} else $article->view();
return false;
}
return true;
}
function preview( $heading, $title, $wikitext ) {
global $wgOut, $wgParser;
$wgOut->addWikitext( '<div class="previewnote">' . wfMsg( 'previewnote' ) . '</div>' );
$wgOut->addWikitext( "== $heading ==" );
$wgOut->addHTML( $wgParser->parse( $wikitext, $title, new ParserOptions(), true, true )->getText() );
$wgOut->addHTML( "<br /><hr /><br />" );
}
function onOutputPageBodyAttributes( $out, $sk, &$bodyAttrs ) {
if( SpecialBlikiFeed::inCat( 'Blog_items' ) ) $bodyAttrs['class'] .= ' blog-item';
return true;
}
}
new Bliki();
<file_sep><?php
/**
* ExtraMagic extension - Adds useful variables and parser functions
*
* See http://www.organicdesign.co.nz/Extension:ExtraMagic.php
*
* @package MediaWiki
* @subpackage Extensions
* @author [http://www.organicdesign.co.nz/User:Nad User:Nad]
* @copyright © 2007 [http://www.organicdesign.co.nz/User:Nad User:Nad]
* @licence GNU General Public Licence 2.0 or later
*/
if( !defined( 'MEDIAWIKI' ) ) die('Not an entry point.' );
define( 'EXTRAMAGIC_VERSION', '3.5.2, 2014-10-22' );
$wgExtensionCredits['parserhook'][] = array(
'name' => 'ExtraMagic',
'author' => '[http://www.organicdesign.co.nz/User:<NAME>]',
'description' => 'Adds useful variables and parser functions',
'url' => 'http://www.organicdesign.co.nz/Extension:ExtraMagic.php',
'version' => EXTRAMAGIC_VERSION
);
$wgExtraMagicVariables = array(
'CURRENTUSER',
'CURRENTPERSON',
'CURRENTLANG',
'CURRENTSKIN',
'ARTICLEID',
'IPADDRESS',
'DOMAIN',
'GUID',
'USERPAGESELFEDITS'
);
class ExtraMagic {
function __construct() {
global $wgHooks, $wgExtensionFunctions;
$wgExtensionFunctions[] = array( $this, 'setup' );
$wgHooks['LanguageGetMagic'][] = $this;
$wgHooks['MagicWordwgVariableIDs'][] = $this;
$wgHooks['ParserGetVariableValueVarCache'][] = $this;
}
function setup() {
global $wgParser;
$wgParser->setFunctionHook( 'REQUEST', array( $this, 'expandRequest' ), SFH_NO_HASH );
$wgParser->setFunctionHook( 'COOKIE', array( $this, 'expandCookie' ), SFH_NO_HASH );
$wgParser->setFunctionHook( 'USERID', array( $this, 'expandUserID' ), SFH_NO_HASH );
$wgParser->setFunctionHook( 'IFGROUP', array( $this, 'expandIfGroup' ) );
$wgParser->setFunctionHook( 'IFUSES', array( $this, 'expandIfUses' ) );
$wgParser->setFunctionHook( 'IFCAT', array( $this, 'expandIfCat' ) );
$wgParser->setFunctionHook( 'PREV', array( $this, 'expandPrev' ) );
$wgParser->setFunctionHook( 'NEXT', array( $this, 'expandNext' ) );
$wgParser->setFunctionHook( 'OWNER', array( $this, 'expandOwner' ), SFH_NO_HASH );
}
function onLanguageGetMagic( &$magicWords, $langCode = null ) {
global $wgExtraMagicVariables;
// Magic words
foreach( $wgExtraMagicVariables as $var ) $magicWords[strtolower( $var )] = array( 1, $var );
// Parser functions
$magicWords['REQUEST'] = array( 0, 'REQUEST' );
$magicWords['COOKIE'] = array( 0, 'COOKIE' );
$magicWords['USERID'] = array( 0, 'USERID' );
$magicWords['IFGROUP'] = array( 0, 'IFGROUP' );
$magicWords['IFUSES'] = array( 0, 'IFUSES' );
$magicWords['IFCAT'] = array( 0, 'IFCAT' );
$magicWords['PREV'] = array( 0, 'PREV' );
$magicWords['NEXT'] = array( 0, 'NEXT' );
$magicWords['OWNER'] = array( 0, 'OWNER' );
return true;
}
function onMagicWordwgVariableIDs( &$variableIDs ) {
global $wgExtraMagicVariables;
foreach( $wgExtraMagicVariables as $var ) $variableIDs[] = strtolower( $var );
return true;
}
/**
 * ParserGetVariableValueVarCache hook: pre-populate the parser's variable
 * cache with the values of this extension's custom magic variables.
 * Fix: dropped the pointless "$ret =" assignment inside the ARTICLEID
 * ternary — the local was never read.
 */
function onParserGetVariableValueVarCache( &$parser, &$varCache ) {
	global $wgUser, $wgTitle;
	// CURRENTUSER: raw username field of the viewing user
	$varCache['currentuser'] = $wgUser->mName;
	// CURRENTPERSON: the user's configured real name
	$varCache['currentperson'] = $wgUser->getRealName();
	// CURRENTLANG: the user's interface-language preference
	$varCache['currentlang'] = $wgUser->getOption( 'language' );
	// CURRENTSKIN: the user's skin preference
	$varCache['currentskin'] = $wgUser->getOption( 'skin' );
	// ARTICLEID: current page's article ID, or the literal string 'NULL'
	// when no title object is available
	$varCache['articleid'] = is_object( $wgTitle ) ? $wgTitle->getArticleID() : 'NULL';
	// IPADDRESS: client address as reported by the web server
	$varCache['ipaddress'] = array_key_exists( 'REMOTE_ADDR', $_SERVER ) ? $_SERVER['REMOTE_ADDR'] : '127.0.0.1';
	// DOMAIN: server name with any leading "www." removed
	$varCache['domain'] = array_key_exists( 'SERVER_NAME', $_SERVER ) ? str_replace( 'www.', '', $_SERVER['SERVER_NAME'] ) : 'localhost';
	// GUID: date-prefixed pseudo-unique ID, e.g. "20140101-AB3F9"
	$varCache['guid'] = strftime( '%Y%m%d', time() ) . '-' . substr( strtoupper( uniqid('', true) ), -5 );
	// USERPAGESELFEDITS: wikitext bullet list of users who have edited a
	// page whose title equals their own username
	$out = '';
	$dbr = wfGetDB( DB_SLAVE );
	$tbl = array( 'user', 'page', 'revision' );
	$cond = array(
		'user_name = page_title',
		'rev_page = page_id',
		'rev_user = user_id'
	);
	$res = $dbr->select( $tbl, 'user_name', $cond, __METHOD__, array( 'DISTINCT', 'ORDER BY' => 'user_name' ) );
	foreach( $res as $row ) {
		// Only list users whose user page actually exists
		$title = Title::newFromText( $row->user_name, NS_USER );
		if( is_object( $title ) && $title->exists() ) $out .= "*[[User:{$row->user_name}|{$row->user_name}]]\n";
	}
	$varCache['userpageselfedits'] = $out;
	return true;
}
/**
* Expand parser functions
*/
/**
 * {{REQUEST:param|default|separator}} — return a request (GET/POST) value.
 * Fix: corrected the misspelled local parameter "$seperator" → "$separator"
 * (parser-function arguments are passed positionally, so callers are unaffected).
 */
public static function expandRequest( &$parser, $param, $default = '', $separator = "\n" ) {
	// Output depends on the current request, so this page must not be cached
	$parser->disableCache();
	$val = array_key_exists( $param, $_REQUEST ) ? $_REQUEST[$param] : $default;
	// Multi-valued parameters (e.g. foo[]=1&foo[]=2) arrive as arrays; flatten them
	if( is_array( $val ) ) $val = implode( $separator, $val );
	return $val;
}
/**
 * {{COOKIE:param|default}} — return a cookie value, or the default when
 * the cookie is not set.
 */
public static function expandCookie( &$parser, $param, $default = '' ) {
	// Output depends on the visitor's cookies, so disable the parser cache
	$parser->disableCache();
	if( array_key_exists( $param, $_COOKIE ) ) {
		return $_COOKIE[$param];
	}
	return $default;
}
/**
 * {{USERID:name}} — return the user ID for the given username or real name,
 * or the current user's ID when no name is supplied; empty string if unknown.
 * Fix: strpos() returns 0 (falsy) for a match at position 0, so the original
 * "contains a space" test silently failed for names with a leading space;
 * use a strict !== false comparison.
 */
public static function expandUserID( &$parser, $param ) {
	if( $param ) {
		// A space anywhere in the argument means it's a real name, not a username
		$col = strpos( $param, ' ' ) !== false ? 'user_real_name' : 'user_name';
		$dbr = wfGetDB( DB_SLAVE );
		if( $row = $dbr->selectRow( 'user', array( 'user_id' ), array( $col => $param ) ) ) return $row->user_id;
	} else {
		// No name given: return the ID of the viewing user
		global $wgUser;
		return $wgUser->getID();
	}
	// Name not found in the user table
	return '';
}
/**
 * {{#IFGROUP:groups|then|else}} — render "then" when the current user
 * belongs to at least one of the comma-separated groups, else "else".
 */
public static function expandIfGroup( &$parser, $groups, $then, $else = '' ) {
	global $wgUser;
	// Normalise the requested group names to lower case
	$wanted = array_map( 'strtolower', explode( ',', $groups ) );
	// Any overlap with the user's effective groups?
	$matches = array_intersect( $wanted, $wgUser->getEffectiveGroups() );
	return $matches ? $then : $else;
}
/**
 * {{#IFUSES:template|then|else}} — render "then" when the current page
 * transcludes the given template, else "else".
 * Fix: Title::newFromText() returns null for an unparsable name, which
 * previously caused a fatal call on null; now treated as "not used".
 */
public static function expandIfUses( &$parser, $tmpl, $then, $else = '' ) {
	global $wgTitle;
	$title = Title::newFromText( $tmpl );
	if( !is_object( $title ) ) return $else;
	$dbr = wfGetDB( DB_SLAVE );
	$tmpl = $dbr->addQuotes( $title->getDBkey() );
	$id = $wgTitle->getArticleID();
	// tl_namespace = 10 restricts the link to the Template namespace
	return $dbr->selectRow( 'templatelinks', '1', "tl_from = $id AND tl_namespace = 10 AND tl_title = $tmpl" ) ? $then : $else;
}
/**
 * {{#IFCAT:category|then|else}} — render "then" when the current page is
 * in the given category, else "else".
 * Fix: Title::newFromText() returns null for an unparsable name, which
 * previously caused a fatal call on null; now treated as "not in category"
 * (consistent with the same guard in expandIfUses).
 */
public static function expandIfCat( &$parser, $cat, $then, $else = '' ) {
	global $wgTitle;
	$title = Title::newFromText( $cat );
	if( !is_object( $title ) ) return $else;
	$id = $wgTitle->getArticleID();
	$dbr = wfGetDB( DB_SLAVE );
	$cat = $dbr->addQuotes( $title->getDBkey() );
	return $dbr->selectRow( 'categorylinks', '1', "cl_from = $id AND cl_to = $cat" ) ? $then : $else;
}
/**
 * {{#NEXT:list}} — the item after the current page in a '#'-separated list.
 */
public static function expandNext( $parser, $list ) {
	$next = self::nextprev( $list, 1 );
	return $next;
}
/**
 * {{#PREV:list}} — the item before the current page in a '#'-separated list.
 */
public static function expandPrev( $parser, $list ) {
	$prev = self::nextprev( $list, -1 );
	return $prev;
}
/**
 * Shared worker for PREV/NEXT: locate the current page in a '#'-separated
 * list of titles and return its neighbour at offset $j ('' when absent).
 */
public static function nextprev( $l, $j ) {
	global $wgTitle;
	// Strip any trailing "further results" wiki-link from the list first
	$items = explode( '#', preg_replace( '|\s*\[\[.+|', '', $l ) );
	$pos = array_search( $wgTitle->getPrefixedText(), $items );
	if( $pos === false ) return '';
	$neighbour = $pos + $j;
	return array_key_exists( $neighbour, $items ) ? $items[$neighbour] : '';
}
/**
 * {{OWNER:title}} — the username of the first revision's author for the
 * given page (or the current page when no title is supplied); empty string
 * when the page doesn't exist.
 * Fix: Title::newFromText() returns null for an unparsable name, which
 * previously caused a fatal call on null; now returns ''.
 */
public static function expandOwner( $parser, $title ) {
	$owner = '';
	if( empty( $title ) ) {
		global $wgTitle;
		$title = $wgTitle;
	} else {
		$title = Title::newFromText( $title );
	}
	if( !is_object( $title ) ) return $owner;
	$id = $title->getArticleID();
	$dbr = wfGetDB( DB_SLAVE );
	// Ascending rev_timestamp order so the FIRST revision's author wins
	if( $id > 0 && $row = $dbr->selectRow( 'revision', 'rev_user', array( 'rev_page' => $id ), __METHOD__, array( 'ORDER BY' => 'rev_timestamp' ) ) ) {
		$owner = User::newFromID( $row->rev_user )->getName();
	}
	return $owner;
}
}
// Instantiate the extension singleton; its constructor registers the hooks
// and parser functions above.
new ExtraMagic();
<file_sep><?php
/**
* Internationalisation file for the extension PdfBook
*
* @file
* @ingroup Extensions
* @author <NAME>
* @licence GNU General Public Licence 2.0 or later
*/
$messages = array();
/** English
* @author <NAME>
*/
$messages['en'] = array(
'pdfbook-action' => 'Print as PDF',
'pdfbook-log' => '$1 exported as a PDF book',
'pdfbook-desc' => 'Composes a book from pages in a category and exports as a PDF book',
);
/** Message documentation (Message documentation)
* @author Lloffiwr
*/
$messages['qqq'] = array(
'pdfbook-log' => '$1 is the username of the user who exports the PDF book',
);
/** Afrikaans (Afrikaans)
* @author Naudefj
*/
$messages['af'] = array(
'pdfbook-action' => 'Druk as PDF',
'pdfbook-log' => "$1 is as 'n PDF-boek geëksporteer",
'pdfbook-desc' => "Maak 'n boek van bladsye in 'n kategorie en eksporteer as 'n PDF-boek",
);
/** Belarusian (Taraškievica orthography) (Беларуская (тарашкевіца))
* @author EugeneZelenko
* @author Wizardist
*/
$messages['be-tarask'] = array(
'pdfbook-action' => 'Друкаваць у фармаце PDF',
'pdfbook-log' => '$1 экспартаваная як кніга ў фармаце PDF',
'pdfbook-desc' => 'Стварае кнігу са старонак у катэгорыі і экспартуе яе ў фармат PDF',
);
/** Bengali (বাংলা)
* @author Bellayet
* @author Wikitanvir
*/
$messages['bn'] = array(
'pdfbook-action' => 'পিডিএফ হিসেবে মুদ্রণ',
'pdfbook-log' => '$1 পিডিএফ বই হিসেবে তৈরিকৃত',
);
/** Breton (Brezhoneg)
* @author Y-M D
*/
$messages['br'] = array(
'pdfbook-action' => 'Moullañ er furmad PDF',
'pdfbook-log' => '$1 enporzhiet dindan stumm ul levr PDF',
'pdfbook-desc' => "A sav ul levr adalek pajennoù ur rummad hag ec'h enporzh anezhañ evel ul levr er furmad PDF",
);
/** Bosnian (Bosanski)
* @author CERminator
*/
$messages['bs'] = array(
'pdfbook-action' => 'Štampaj kao PDF',
'pdfbook-log' => '$1 izvezena kao PDF knjiga',
'pdfbook-desc' => 'Sastavlja knjigu od stranica u kategoriji i izvozi ih kao PDF knjigu',
);
/** Catalan (Català)
* @author Aleator
*/
$messages['ca'] = array(
'pdfbook-action' => 'Imprimeix com a PDF',
'pdfbook-log' => '$1 exportat com a llibre PDF',
'pdfbook-desc' => 'Compon un llibre a partir de pàgines en una categoria i ho exporta com a llibre PDF',
);
/** Chechen (Нохчийн)
* @author Sasan700
*/
$messages['ce'] = array(
'pdfbook-action' => 'Зорба оц PDF',
'pdfbook-log' => '$1 арадаькхина PDF-жайна санна',
'pdfbook-desc' => 'Кадегари агlонех кхуллу жайна, PDF бараме дерзош',
);
/** Welsh (Cymraeg)
* @author Lloffiwr
*/
$messages['cy'] = array(
'pdfbook-action' => 'Argraffu ar ffurf PDF',
'pdfbook-log' => "Mae $1 wedi allforio'r llyfr ar ffurf PDF",
'pdfbook-desc' => 'Yn defnyddio tudalennau o rhyw gategori i lunio llyfr, ac allforio hwnnw ar ffurf llyfr PDF',
);
/** Danish (Dansk)
* @author <NAME>
*/
$messages['da'] = array(
'pdfbook-action' => 'Udskriv som PDF',
'pdfbook-log' => '$1 eksporteret som en PDF-bog',
'pdfbook-desc' => 'Sammensætter en bog fra siderne i en kategori og eksporterer den som PDF',
);
/** German (Deutsch)
* @author Kghbln
*/
$messages['de'] = array(
'pdfbook-action' => 'Als PDF-Datei ausgeben',
'pdfbook-log' => '$1 wurde als Zusammenstellung in einer PDF-Datei erstellt',
'pdfbook-desc' => 'Ermöglicht die Erstellung von PDF-Dateien einzelner Seiten oder gesammelt aller in einer Kategorie vorhandener Seiten',
);
/** Lower Sorbian (Dolnoserbski)
* @author Michawiki
*/
$messages['dsb'] = array(
'pdfbook-action' => 'Ako PDF śišćaś',
'pdfbook-log' => '$1 jo se ako PDF-knigły eksportěrował',
'pdfbook-desc' => 'Staja knigły z bokow w kategoriji gromadu a eksportěrujo je ako PDF-knigły',
);
/** Basque (Euskara)
* @author An13sa
*/
$messages['eu'] = array(
'pdfbook-action' => 'PDF gisa inprimatu',
'pdfbook-log' => '$1 PDF liburu bezala esportatu da',
'pdfbook-desc' => 'Kategoria bateko orriak hartu eta PDF liburu gisa esportatzen ditu',
);
/** Persian (فارسی)
* @author Huji
* @author محک
*/
$messages['fa'] = array(
'pdfbook-action' => 'چاپ به صورت پیدیاف',
'pdfbook-log' => '$1 برونریزی شده به صورت یک کتاب پیدیاف',
'pdfbook-desc' => 'از صفحههای یک رده کتابی میسازد و به صورت یک کتاب پیدیاف برونریزی میکند',
);
/** French (Français)
* @author Jean-Frédéric
*/
$messages['fr'] = array(
'pdfbook-action' => 'Imprimer au format PDF',
'pdfbook-log' => '$1 exporté sous forme de livre PDF',
'pdfbook-desc' => 'Compose un livre à partir des pages d’une catégorie et exporte comme un livre au format PDF',
);
/** Franco-Provençal (Arpetan)
* @author ChrisPtDe
*/
$messages['frp'] = array(
'pdfbook-action' => 'Emprimar u format PDF',
'pdfbook-log' => '$1 èxportâ desot fôrma de lévro PDF',
);
/** Galician (Galego)
* @author Toliño
*/
$messages['gl'] = array(
'pdfbook-action' => 'Imprimir como PDF',
'pdfbook-log' => '$1 exportado como libro en PDF',
'pdfbook-desc' => 'Composición dun libro a partir das páxinas presentes nunha categoría e exportación como un libro en PDF',
);
/** Swiss German (Alemannisch)
* @author Als-Holder
*/
$messages['gsw'] = array(
'pdfbook-action' => 'As PDF-Datei uusgee',
'pdfbook-log' => '$1 exportiert as PDF-Buech',
'pdfbook-desc' => 'Stellt e Buech vu Syte us ere Kategori zämme un exportiert s as PDF-Buech',
);
/** Hebrew (עברית)
* @author Amire80
*/
$messages['he'] = array(
'pdfbook-action' => 'הדפסה כקובץ PDF',
'pdfbook-log' => '$1 יוּצָא כספר PDF',
'pdfbook-desc' => 'לערוך ספר מדפים בקטגוריה וייצא אותו כ־PDF',
);
/** Upper Sorbian (Hornjoserbsce)
* @author Michawiki
*/
$messages['hsb'] = array(
'pdfbook-action' => 'Jako PDF ćišćeć',
'pdfbook-log' => '$1 bu jako PDF-kniha eksportowany',
'pdfbook-desc' => 'Zestaja knihu ze stronow w kategoriji a eksportuje jako PDF-knihu',
);
/** Hungarian (Magyar)
 * @author BáthoryPéter
 */
$messages['hu'] = array(
'pdfbook-action' => 'Nyomtatás PDF formátumban',
// Fix: "$ 1" (with a space) is not recognised as a message parameter by
// MediaWiki; the marker must be "$1" for substitution to occur.
'pdfbook-log' => '$1 exportálva PDF-könyvként',
'pdfbook-desc' => 'Kiválasztott oldalakat kategóriába helyez, összeállít belőlük egy könyvet, és exportálja PDF-könyvként',
);
/** Interlingua (Interlingua)
* @author McDutchie
*/
$messages['ia'] = array(
'pdfbook-action' => 'Imprimer como PDF',
'pdfbook-log' => '$1 exportate como libro in PDF',
'pdfbook-desc' => 'Compone un libro ex paginas in un categoria e exporta lo como libro in PDF',
);
/** Indonesian (Bahasa Indonesia)
* @author IvanLanin
*/
$messages['id'] = array(
'pdfbook-action' => 'Cetak sebagai PDF',
'pdfbook-log' => '$1 diekspor sebagai buku PDF',
'pdfbook-desc' => 'Menyusun suatu buku dari halaman dalam kategori dan mengekspornya sebagai buku PDF',
);
/** Iloko (Ilokano)
* @author Lam-ang
*/
$messages['ilo'] = array(
'pdfbook-action' => 'Imaldit a kasla PDF',
'pdfbook-log' => '$1 iangkat a kasla PDF na libro',
'pdfbook-desc' => 'Agaramid ti libro nga naggapu kadagiti pampanid iti kategoria ken iangkat a kasla PDF a libro',
);
/** Italian (Italiano)
* @author Beta16
*/
$messages['it'] = array(
'pdfbook-action' => 'Stampa in formato PDF',
'pdfbook-log' => '$1 esportato come libro in PDF',
'pdfbook-desc' => 'Compone un libro dalle pagine in una categoria ed esporta come libro in PDF',
);
/** Japanese (日本語)
* @author Iwai.masaharu
* @author 青子守歌
*/
$messages['ja'] = array(
'pdfbook-action' => 'PDFとして印刷する',
'pdfbook-log' => '$1をPDFブックとしてエクスポート',
'pdfbook-desc' => 'カテゴリ内のページから本を構築し、PDFブックとしてエクスポートする',
);
/** Khmer (ភាសាខ្មែរ)
* @author គីមស៊្រុន
*/
$messages['km'] = array(
'pdfbook-action' => 'បោះពុម្ភជា PDF',
'pdfbook-log' => '$1 នាំចេញជាសៀវភៅ PDF',
'pdfbook-desc' => 'តែងសៀវភៅពីទំព័រនានាក្នុងចំណាត់ក្រុមមួយ រួចនាំចេញជាសៀវភៅ PDF',
);
/** Colognian (Ripoarisch)
* @author Purodha
*/
$messages['ksh'] = array(
'pdfbook-action' => 'Als PDF-Datei dröcke',
'pdfbook-log' => '$1 wood als en Aat Booch en en PDF-Dattei expoteet',
'pdfbook-desc' => 'Ställd e Booch zesamme us dä Sigge en ene Saachjropp un expoteed et als en Aat Booch en en PDF-Dattei.',
);
/** Kurdish (Latin) (Kurdî (Latin))
* @author <NAME>
*/
$messages['ku-latn'] = array(
'pdfbook-action' => 'Weka PDF çap bike',
);
/** Luxembourgish (Lëtzebuergesch)
* @author Robby
*/
$messages['lb'] = array(
'pdfbook-action' => 'Als PDF drécken',
'pdfbook-log' => '$1 gouf als PDF-Buch exportéiert',
'pdfbook-desc' => 'Setzt e Buch aus Säiten an eng Kategorie an exportéiert se als PDF-Buch',
);
/** Latvian (Latviešu)
* @author GreenZeb
* @author Papuass
*/
$messages['lv'] = array(
'pdfbook-action' => 'Drukāt kā PDF',
'pdfbook-log' => '$1 pārstrādāja šo kā grāmatu PDF failā',
'pdfbook-desc' => 'Izveido grāmatu no kategorijā esošajām lapām un pārstrādā to PDF formātā',
);
/** Macedonian (Македонски)
* @author Bjankuloski06
*/
$messages['mk'] = array(
'pdfbook-action' => 'Испечати како PDF',
'pdfbook-log' => 'Извоз на $1 како PDF-книга',
'pdfbook-desc' => 'Составува книга од страници во извесна категорија и ја извезува во PDF-формат',
);
/** Malay (Bahasa Melayu)
* @author Anakmalaysia
*/
$messages['ms'] = array(
'pdfbook-action' => 'Cetak dalam bentuk PDF',
'pdfbook-log' => '$1 dieksport dalam bentuk buku PDF',
'pdfbook-desc' => 'Mengarang buku daripada laman-laman dalam satu kategori lalu mengeksportnya dalam bentuk buku PDF',
);
/** Mazanderani (مازِرونی)
* @author محک
*/
$messages['mzn'] = array(
'pdfbook-action' => 'چاپ به شکل پیدیاف',
'pdfbook-log' => '$1 برونریزی بیی به صورت اتا کتاب پیدیاف',
'pdfbook-desc' => 'از صفحهئون اتا رج کتاب ساجنه و به صورت اتا کتاب پیدیاف برونریزی کانده',
);
/** Dutch (Nederlands)
* @author Siebrand
*/
$messages['nl'] = array(
'pdfbook-action' => 'Afdrukken als PDF',
'pdfbook-log' => '$1 is geëxporteerd als PDF-boek',
'pdfbook-desc' => "Maakt een boek van pagina's in een categorie en maakt een export als PDF-boek",
);
/** Norwegian (bokmål) (Norsk (bokmål))
* @author Nghtwlkr
*/
$messages['no'] = array(
'pdfbook-action' => 'Skriv ut som PDF',
'pdfbook-log' => '$1 eksportert som en PDF-bok',
'pdfbook-desc' => 'Komponerer en bok fra sider i en kategori og eksporterer dem som en PDF-bok',
);
/** Polish (Polski)
* @author Sp5uhe
*/
$messages['pl'] = array(
'pdfbook-action' => 'Drukuj do PDF',
'pdfbook-log' => 'wyeksportowano $1 jako książkę w formacie PDF',
'pdfbook-desc' => 'Tworzenie książki ze stron kategorii i eksportowanie w formacie PDF',
);
/** Piedmontese (Piemontèis)
* @author Borichèt
* @author Dragonòt
*/
$messages['pms'] = array(
'pdfbook-action' => 'Stampa com PDF',
'pdfbook-log' => '$1 esportà coma lìber PDF',
'pdfbook-desc' => 'A compon un lìber a parte da le pàgine an na categorìa e a lo espòrta com un lìber an PDF',
);
/** Portuguese (Português)
* @author <NAME>
*/
$messages['pt'] = array(
'pdfbook-action' => 'Imprimir como PDF',
'pdfbook-log' => '$1 exportado como um livro PDF',
'pdfbook-desc' => 'Compõe um livro com as páginas de uma categoria e exporta-o como um livro PDF',
);
/** Brazilian Portuguese (Português do Brasil)
* @author Garrasdalua
*/
$messages['pt-br'] = array(
'pdfbook-action' => 'Imprima como PDF',
'pdfbook-log' => '$1 exportado como um livro PDF',
'pdfbook-desc' => 'Componha um livro com paginas de uma categoria e o exporte como um livro PDF',
);
/** Russian (Русский)
* @author <NAME>
*/
$messages['ru'] = array(
'pdfbook-action' => 'Печать в PDF',
'pdfbook-log' => '$1 экспортирована как PDF-книга',
'pdfbook-desc' => 'Создаёт книгу из страниц категории, преобразует её в PDF',
);
/** Slovak (Slovenčina)
* @author Helix84
*/
$messages['sk'] = array(
'pdfbook-action' => 'Vytlačiť do PDF',
'pdfbook-log' => 'Stránka $1 bola exportovaná ako kniha vo formáte PDF',
'pdfbook-desc' => 'Zostavá knihu z stránok v kategórii a exportuje ju vo formáte PDF',
);
/** Swedish (Svenska)
* @author Diupwijk
*/
$messages['sv'] = array(
'pdfbook-action' => 'Skriv ut som PDF',
'pdfbook-log' => '$1 exporterad som PDF-bok',
'pdfbook-desc' => 'Sätter samman en bok från sidorna i en kategori och exporterar dem som PDF',
);
/** Swahili (Kiswahili)
* @author Lloffiwr
*/
$messages['sw'] = array(
'pdfbook-action' => 'Chapa kwa mtindo wa PDF',
'pdfbook-log' => '$1 amekipeleka kitabu nje kwa mtindo wa PDF',
'pdfbook-desc' => 'Inaunda kurasa za jamii fulani katika kitabu, na kukipeleka nje kwa mtindo wa PDF',
);
/** Telugu (తెలుగు)
* @author రహ్మానుద్దీన్
*/
$messages['te'] = array(
'pdfbook-action' => 'PDF గా ప్రచురించు',
'pdfbook-log' => '$1 PDF పుస్తకంగా ఎగుమతి చేయబడింది',
);
/** Tagalog (Tagalog)
* @author AnakngAraw
*/
$messages['tl'] = array(
'pdfbook-action' => 'Ilimbag bilang PDF',
'pdfbook-log' => 'Iniluwas ang $1 bilang isang aklat na PDF',
'pdfbook-desc' => 'Bumubuo ng isang aklat mula sa mga pahinang nasa loob ng isang kategorya at nagluluwas bilang isang aklat na PDF',
);
<file_sep><?php
/**
* Bliki extension - Adds "Bliki" (blog in a wiki) functionality
*
* See http://www.organicdesign.co.nz/bliki for more detail
*
* @package MediaWiki
* @subpackage Extensions
* @author [http://www.organicdesign.co.nz/nad Nad]
* @copyright © 2013 [http://www.organicdesign.co.nz/nad Nad]
* @licence GNU General Public Licence 2.0 or later
*/
/**
* Add a new special page for blog feeds based on Special:RecentChanges
*/
// Category used to identify blog posts when no "q" query-string parameter is
// supplied (site config may override it before this file is loaded)
if( !isset( $wgBlikiDefaultCat ) ) $wgBlikiDefaultCat = 'Blog items';
// Register the feed special page implemented below
$wgSpecialPages['BlikiFeed'] = 'SpecialBlikiFeed';
/**
 * Special:BlikiFeed — a blog feed built on top of Special:RecentChanges.
 * Restricts the recent-changes query to page creations in the blog (or tag)
 * category and forces the output into feed format.
 */
class SpecialBlikiFeed extends SpecialRecentChanges {
// Construct special page with our new name, and force to feed type
public function __construct() {
global $wgHooks;
// Register ourself as a SpecialRecentChangesQuery handler so we can
// filter the query (see onSpecialRecentChangesQuery below)
$wgHooks['SpecialRecentChangesQuery'][] = $this;
// Default to RSS output and a very large day range so old posts appear
if( !$this->getRequest()->getVal( 'feed' ) ) $this->getRequest()->setVal( 'feed', 'rss' );
if( !$this->getRequest()->getVal( 'days' ) ) $this->getRequest()->setVal( 'days', 1000 );
parent::__construct( 'BlikiFeed' );
}
// Inject a value into opts so we can know on the hook function that its a bliki feed
public function doMainQuery( $conds, $opts ) {
global $wgBlikiDefaultCat, $wgServer, $wgArticlePath, $wgScriptPath, $wgScript;
// The 'bliki' opt carries the category to filter by ("q" request param,
// falling back to the configured default category)
$opts->add( 'bliki', false );
$opts['bliki'] = array_key_exists( 'q', $_REQUEST ) ? $_REQUEST['q'] : $wgBlikiDefaultCat;
// Make all links absolute (feed readers need fully-qualified URLs)
$wgArticlePath = $wgServer.$wgArticlePath;
$wgScriptPath = $wgServer.$wgScriptPath;
$wgScript = $wgServer.$wgScript;
// Allow printing raw desc results (debugging aid: ?test=PageTitle)
if( array_key_exists( 'test', $_REQUEST ) ) {
$t = Title::newFromText( $_REQUEST['test'] );
print BlikiChangesFeed::desc($t);
die;
}
// Add the rollback right to the user object so that the page join exists, because without it the new category join fails
// (the right is restored immediately after the parent query runs)
$user = $this->getUser();
$rights = $user->mRights;
$user->mRights[] = 'rollback';
$res = parent::doMainQuery( $conds, $opts );
$user->mRights = $rights;
return $res;
}
// If it's a bliki list, filter the list to onlynew items and to the tag cat if q supplied
public static function onSpecialRecentChangesQuery( &$conds, &$tables, &$join_conds, $opts, &$query_options, &$fields ) {
// Only act when doMainQuery above injected the 'bliki' opt
if( $opts->validateName( 'bliki' ) ) {
$tables[] = 'categorylinks';
// rc_new=1: only page creations count as blog posts
$conds[] = 'rc_new=1';
$dbr = wfGetDB( DB_SLAVE );
// 'bliki' may be one category or a list of categories
if( is_array( $opts['bliki'] ) ) {
foreach( $opts['bliki'] as $i => $cat ) $opts['bliki'][$i] = Title::newFromText( $cat )->getDBkey();
$catCond = 'cl_to IN (' . $dbr->makeList( $opts['bliki'] ) . ')';
} else $catCond = 'cl_to =' . $dbr->addQuotes( Title::newFromText( $opts['bliki'] )->getDBkey() );
$join_conds['categorylinks'] = array( 'RIGHT JOIN', "cl_from=page_id AND $catCond" );
}
return true;
}
/**
 * Build the feed title/description/URL and return our custom ChangesFeed
 * plus its formatter (overrides the parent's feed factory).
 */
public function getFeedObject( $feedFormat ) {
global $wgRequest, $wgSitename;
// Blog title & description
$q = $wgRequest->getVal( 'q', false );
$cat = $q ? Title::newFromText( $q )->getText() : false;
$tag = $cat ? self::inCat( 'Tags', $cat ) : false;
$title = str_replace( ' wiki', '', $wgSitename ) . ' blog';
$desc = $cat ? ( $tag ? "\"$cat\" posts" : lcfirst( $cat ) ) : 'posts';
$desc = wfMsg( 'bliki-desc', $desc, $wgSitename );
// Blog URL
$blog = Title::newFromText( 'Blog' );
$url = $cat ? $blog->getFullURL( "q=$cat" ) : $blog->getFullURL();
// Instantiate our custom ChangesFeed class
$changesFeed = new BlikiChangesFeed( $feedFormat, 'rcfeed' );
$formatter = $changesFeed->getFeedObject( $title, $desc, $url );
return array( $changesFeed, $formatter );
}
/**
 * Return whether or not the passed title is a member of the passed cat
 */
public static function inCat( $cat, $title = false ) {
global $wgTitle;
// Default to the current page when no title is given
if( $title === false ) $title = $wgTitle;
if( !is_object( $title ) ) $title = Title::newFromText( $title );
$id = $title->getArticleID();
$dbr = wfGetDB( DB_SLAVE );
$cat = $dbr->addQuotes( Title::newFromText( $cat, NS_CATEGORY )->getDBkey() );
// Returns the row object (truthy) or false
return $dbr->selectRow( 'categorylinks', '1', "cl_from = $id AND cl_to = $cat" );
}
}
/**
 * Our BlikiChanges special page uses this custom ChangesFeed
 * which has the item description changed to the plain-text blog item summary instead of the usual diff/wikitext
 */
class BlikiChangesFeed extends ChangesFeed {
// This is just an exact copy of the parent, we had to override because it calls self::generateFeed
public function execute( $feed, $rows, $lastmod, $opts ) {
global $wgLang, $wgRenderHashAppend;
if ( !FeedUtils::checkFeedOutput( $this->format ) ) {
return null;
}
// Cache key incorporates feed type, format, language and all query options
$optionsHash = md5( serialize( $opts->getAllValues() ) ) . $wgRenderHashAppend;
$timekey = wfMemcKey( $this->type, $this->format, $wgLang->getCode(), $optionsHash, 'timestamp' );
$key = wfMemcKey( $this->type, $this->format, $wgLang->getCode(), $optionsHash );
FeedUtils::checkPurge( $timekey, $key );
// Serve from cache when still fresh; otherwise regenerate and re-cache
$cachedFeed = $this->loadFromCache( $lastmod, $timekey, $key );
if( is_string( $cachedFeed ) ) {
$feed->httpHeaders();
echo $cachedFeed;
} else {
// Capture the generated feed output so it can be stored in the cache
ob_start();
self::generateFeed( $rows, $feed );
$cachedFeed = ob_get_contents();
ob_end_flush();
$this->saveToCache( $cachedFeed, $timekey, $key );
}
return true;
}
// Much more compact version than parent because only new items by known authors will be in the list
public static function generateFeed( $rows, &$feed ) {
$feed->outHeader();
foreach( $rows as $obj ) {
$title = Title::makeTitle( $obj->rc_namespace, $obj->rc_title );
$url = $title->getFullURL();
// Item description comes from desc() below (plain-text summary)
$item = new FeedItem( $title->getPrefixedText(), self::desc( $title ), $url, $obj->rc_timestamp, $obj->rc_user_text, $url );
$feed->outItem( $item );
}
$feed->outFooter();
}
// Use the plain-text of the summary for the item description
static function desc( $title ) {
global $wgParser;
$article = new Article( $title );
$content = $article->getContent();
// Extract the "1=" template parameter as the summary; fall back to the title text
$text = preg_match( "/^.+?1=(.+?)\|2=/s", $content, $m ) ? $m[1] : $title->getText();
$html = $wgParser->parse( trim( $text ), $title, new ParserOptions(), true, true )->getText();
// Drop linked images, then strip everything except basic inline markup
$html = preg_replace( '|<a[^<]+<img .+?</a>|', '', $html );
$desc = strip_tags( $html, '<p><a><i><b><u><s>' );
// Collapse newlines, drop empty paragraphs, then re-break before each <p>
$desc = preg_replace( "/[\r\n]+/", "", $desc );
$desc = preg_replace( "|<p></p>|", "", $desc );
$desc = trim( preg_replace( "|<p>|", "\n<p>", $desc ) );
return $desc;
}
}
<file_sep>$(document).ready( function() {
var poll = mw.config.get('wgAjaxCommentsPollServer');
// If a value of -1 has been supplied for this, then comments are disabled for this page
if(poll < 0) return;
// If the comments area has been added, render the discussion into it
if($('#ajaxcomments-name').length > 0) {
// Change the talk page tab to a local link to the comments at the end of the page if it exists
$('#ca-talk a').attr('href','#ajaxcomments');
$('#ca-talk').removeClass('new');
// Create a target for the comments and put a loader in it
$('#ajaxcomments-name').after('<div id="ajaxcomments"><div class="ajaxcomments-loader"></div></div>');
// Ask the server for the rendered comments
$.ajax({
type: 'GET',
url: mw.util.wikiScript(),
data: { action: 'ajaxcomments', title: mw.config.get('wgPageName') },
dataType: 'html',
success: function(html) { $('#ajaxcomments').html(html); }
});
}
// If server polling is enabled, set up a regular ajax request
if(poll > 0) {
setInterval( function() {
// Ask the server for the rendered comments if they've changed
$.ajax({
type: 'GET',
url: mw.util.wikiScript(),
data: { action: 'ajaxcomments', title: mw.config.get('wgPageName'), ts: $('#ajaxcomment-timestamp').html() },
dataType: 'html',
success: function(html) {
if(html) $('#ajaxcomments').html(html);
}
});
}, poll * 1000);
}
});
/**
 * Handler for the "add comment" link: opens an empty input box after the
 * link's container and removes the "no comments" placeholder if present.
 */
window.ajaxcomment_add = function() {
	var container = $('#ajaxcomment-add').parent();
	ajaxcomment_textinput(container, 'add');
	$('#ajaxcomments-none').remove();
};
/**
 * An edit link has been clicked: open an input box in place of the rendered
 * comment text and populate it with the comment's wikitext source.
 */
window.ajaxcomment_edit = function(id) {
	// The rendered text element of the comment being edited
	var e = $('#ajaxcomments-' + id + ' .ajaxcomment-text').first();
	ajaxcomment_textinput(e, 'edit');
	// Fetch the raw wikitext from the server into the new textarea
	ajaxcomment_source( id, $('textarea', e.parent()).first() );
	// Hide the rendered text while editing (restored by ajaxcomment_cancel)
	e.hide();
};
/**
 * Handler for a comment's "reply" link: opens an input box below the
 * links row of the comment being replied to.
 */
window.ajaxcomment_reply = function(id) {
	var linksRow = $('#ajaxcomments-' + id + ' .ajaxcomment-links').first();
	ajaxcomment_textinput(linksRow, 'reply');
};
/**
 * A delete link has been clicked: confirm with a modal dialog, then ask the
 * server to delete the comment and replace it with the returned HTML.
 */
window.ajaxcomment_del = function(id) {
	var target = $('#ajaxcomments-' + id);
	var buttons = {};
	// "Yes" button: perform the deletion via ajax
	buttons[mw.message( 'ajaxcomments-yes' ).escaped()] = function() {
		// Show a loader while the request is in flight
		target.html('<div class="ajaxcomments-loader"></div>');
		$.ajax({
			type: 'GET',
			url: mw.util.wikiScript(),
			data: {
				action: 'ajaxcomments',
				title: mw.config.get('wgPageName'),
				cmd: 'del',
				id: id,
			},
			context: target,
			dataType: 'html',
			success: function(html) {
				// Server returns the replacement markup for the deleted comment
				this.replaceWith(html);
			}
		});
		$(this).dialog('close');
	};
	// "Cancel" button: just close the dialog
	buttons[mw.message( 'ajaxcomments-cancel' ).escaped()] = function() { $(this).dialog('close'); };
	$('<div>' + mw.message( 'ajaxcomments-confirmdel' ).escaped() + '</div>').dialog({
		modal: true,
		resizable: false,
		width: 400,
		title: mw.message( 'ajaxcomments-confirm' ).escaped(),
		buttons: buttons
	});
};
/**
 * Disable the passed input box, retrieve the wikitext source via ajax, then populate and enable the input
 */
window.ajaxcomment_source = function(id, target) {
	// Disable while loading so the user can't type into a half-populated box
	target.attr('disabled',true);
	$.ajax({
		type: 'GET',
		url: mw.util.wikiScript(),
		data: {
			action: 'ajaxcomments',
			title: mw.config.get('wgPageName'),
			cmd: 'src',
			id: id,
		},
		context: target,
		dataType: 'json',
		success: function(json) {
			// json.text holds the comment's raw wikitext
			this.val(json.text);
			this.attr('disabled',false);
		}
	});
};
/**
 * Send a request to like/dislike an item
 * - val is passed in the "text" field; the returned response is the new like/dislike links
 */
window.ajaxcomment_like = function(id, val) {
	var target = $('#ajaxcomments-' + id);
	$.ajax({
		type: 'GET',
		url: mw.util.wikiScript(),
		data: {
			action: 'ajaxcomments',
			title: mw.config.get('wgPageName'),
			cmd: 'like',
			id: id,
			text: val
		},
		context: target,
		dataType: 'html',
		success: function(html) {
			// If something is returned, replace the like/dislike links with it
			// (an empty response means the state did not change)
			if(html) {
				$('#ajaxcomment-like',this).first().remove();
				$('#ajaxcomment-dislike',this).first().replaceWith(html);
			}
		}
	});
};
/**
 * Open a comment input box (textarea plus post/cancel buttons) after the
 * passed element. cmd is 'add', 'edit' or 'reply'; it selects the CSS class
 * and is forwarded to ajaxcomment_submit by the post button.
 */
window.ajaxcomment_textinput = function(e, cmd) {
	// Only one input box at a time: close any existing one first
	ajaxcomment_cancel();
	var parts = [
		'<div id="ajaxcomment-input" class="ajaxcomment-input-' + cmd + '"><textarea></textarea><br />',
		'<input type="button" onclick="ajaxcomment_submit(this,\'' + cmd + '\')" value="' + mw.message( 'ajaxcomments-post' ).escaped() + '" />',
		'<input type="button" onclick="ajaxcomment_cancel()" value="' + mw.message( 'ajaxcomments-cancel' ).escaped() + '" />',
		'</div>'
	];
	e.after(parts.join(''));
};
/**
 * Close any open comment input box and restore comment text that
 * ajaxcomment_edit may have hidden.
 */
window.ajaxcomment_cancel = function() {
	var inputBox = $('#ajaxcomment-input');
	inputBox.remove();
	$('.ajaxcomment-text').show();
};
/**
 * Submit a comment command to the server
 * - e is the button element that was clicked
 * - cmd will be add, reply or edit
 */
window.ajaxcomment_submit = function(e, cmd) {
	e = $(e);
	var target;
	var id = 0;
	var text = '';
	// If it's an add, create the target at the end
	if( cmd == 'add' ) {
		$('#ajaxcomment-add').parent().after('<div id="ajaxcomments-new"></div>');
		target = $('#ajaxcomments-new');
		text = $('#ajaxcomment-input textarea').val();
	}
	// If it's an edit, create the target as the current comment
	if( cmd == 'edit' ) {
		var c = e.parent().parent();
		target = $('.ajaxcomment-text', c).first();
		text = $('#ajaxcomment-input textarea').val();
		// substr(13) strips the "ajaxcomments-" prefix (13 chars) to get the raw id
		id = c.attr('id').substr(13);
	}
	// If it's a reply, create the target within the current comment
	if( cmd == 'reply' ) {
		e.parent().before('<div id="ajaxcomments-new"></div>');
		target = $('#ajaxcomments-new');
		text = $('#ajaxcomment-input textarea').val();
		// substr(13) strips the "ajaxcomments-" prefix (13 chars) to get the raw id
		id = target.parent().attr('id').substr(13);
	}
	// Put a loader into the target
	target.html('<div class="ajaxcomments-loader"></div>');
	// Send the command and replace the loader with the new post
	$.ajax({
		type: 'GET',
		url: mw.util.wikiScript(),
		data: {
			action: 'ajaxcomments',
			title: mw.config.get('wgPageName'),
			cmd: cmd,
			id: id,
			text: text
		},
		context: target,
		dataType: 'html',
		success: function(html) {
			this.replaceWith(html);
			ajaxcomment_cancel();
		}
	});
};
<file_sep><?php
/**
* jQueryUpload2 MediaWiki extension - allows files to be uploaded to the wiki or to specific pages using the jQueryFileUpload module
*
* Version 2.0.0+ summary:
* - uses MediaWiki's native files instead of just files outside the wiki
* - uses categorisation to determine uploaded files belonging to a specific page
* - i18n message determines category naming convention, e.g. "File uploaded to $1"
* - adds a general upload special page that can be used in place of Special:Upload
* - uses MediaWiki's thumbnails instead of BlueImp's code
*
* jQueryFileUpload module: https://github.com/blueimp/jQuery-File-Upload
*
* @package MediaWiki
* @subpackage Extensions
* @author <NAME> (http://www.organicdesign.co.nz/nad)
*/
// Prevent direct web access to this file outside of MediaWiki
if( !defined( 'MEDIAWIKI' ) ) die( 'Not an entry point.' );
define( 'JQU_VERSION', "2.0.0, 2014-11-23" );
// Filesystem prefix for the per-extension file-type icons
$wgJQUploadIconPrefix = dirname( __FILE__ ) . '/icons/Farm-Fresh_file_extension_';
// Name of the {{file}} parser-function magic word
$wgJQUploadFileMagic = 'file';
$wgHooks['LanguageGetMagic'][] = 'jQueryUpload::onLanguageGetMagic';
// Whether file links get a hover popup
$wgJQUploadFileLinkPopup = true;
// Ajax dispatcher entry point handled by the class
$wgAjaxExportList[] = 'jQueryUpload::server';
$wgExtensionFunctions[] = 'wfJQueryUploadSetup';
$wgSpecialPages['jQueryUpload'] = 'jQueryUpload';
$wgSpecialPageGroups['jQueryUpload'] = 'media';
$wgExtensionCredits['other'][] = array(
'path' => __FILE__,
'name' => "jQueryUpload",
'descriptionmsg' => "jqueryupload-desc",
'url' => "http://www.organicdesign.co.nz/jQueryUpload",
'author' => array( "[http://www.organicdesign.co.nz/nad <NAME>]", "[http://blueimp.net <NAME>]" ),
'version' => JQU_VERSION
);
// If the query-string arg mwaction is supplied, rename action and change mwaction to action
// - this hack was required because the jQueryUpload module uses the name "action" too
if( array_key_exists( 'mwaction', $_REQUEST ) ) {
$wgJQUploadAction = array_key_exists( 'action', $_REQUEST ) ? $_REQUEST['action'] : false;
$_REQUEST['action'] = $_GET['action'] = $_POST['action'] = $_REQUEST['mwaction'];
}
$dir = dirname( __FILE__ );
$wgExtensionMessagesFiles['jQueryUpload'] = "$dir/jQueryUpload.i18n.php";
$wgExtensionMessagesFiles['jQueryUploadAlias'] = "$dir/jQueryUpload.alias.php";
// Pull in the BlueImp upload handler and the extension's main class
require( "$dir/upload/server/php/upload.class.php" );
require( "$dir/jQueryUpload_body.php" );
/**
 * Extension setup function (registered in $wgExtensionFunctions):
 * instantiate the jQueryUpload singleton into the global scope.
 */
function wfJQueryUploadSetup() {
	// Equivalent to "global $wgJQueryUpload; $wgJQueryUpload = new jQueryUpload();"
	$GLOBALS['wgJQueryUpload'] = new jQueryUpload();
}
<file_sep><?php
/**
* Internationalisation for AjaxComments extension
*
* @author <NAME>
* @file
* @ingroup Extensions
*/
// Message table for the AjaxComments extension, keyed by language code.
// Values are MediaWiki message strings; $1, $2 ... are positional message
// parameters substituted by the message system at display time.
$messages = array();
/** English
 * @author Dunkley
 */
$messages['en'] = array(
'ajaxcomments-heading' => "Comments",
'ajaxcomments-comment' => "$1 comment",
'ajaxcomments-comments' => "$1 comments",
'ajaxcomments-none' => "There are no comments about this page",
'ajaxcomments-anon' => "Sign up or log in to post comments",
'ajaxcomments-sig' => "Posted by $1 at $2",
'ajaxcomments-add' => "Add comment",
'ajaxcomments-edit' => "Edit",
'ajaxcomments-reply' => "Reply",
'ajaxcomments-del' => "Delete",
'ajaxcomments-confirmdel' => "Are you sure you want to remove this comment?",
'ajaxcomments-confirm' => "Confirm",
'ajaxcomments-yes' => "Yes",
'ajaxcomments-post' => "Post",
'ajaxcomments-cancel' => "Cancel",
'ajaxcomments-add-summary' => "Comment added using the AjaxComments extension",
'ajaxcomments-reply-summary' => "Comment reply made using the AjaxComments extension",
'ajaxcomments-edit-summary' => "Comment edited using the AjaxComments extension",
'ajaxcomments-del-summary' => "Comment deleted using the AjaxComments extension",
'ajaxcomments-talkdeleted' => "Last comment removed, talk page deleted",
'ajaxcomments-like' => "$1 likes $2's comment",
'ajaxcomments-dislike' => "$1 dislikes $2's comment",
'ajaxcomments-unlike' => "$1 no longer likes $2's comment",
'ajaxcomments-undislike' => "$1 no longer dislikes $2's comment",
'ajaxcomments-nolike' => "Nobody likes this comment",
'ajaxcomments-onelike' => "$1 likes this comment",
'ajaxcomments-manylike' => "$1 and $2 like this comment",
'ajaxcomments-nodislike' => "Nobody dislikes this comment",
'ajaxcomments-onedislike' => "$1 dislikes this comment",
'ajaxcomments-manydislike' => "$1 and $2 dislike this comment",
);
/** Portuguese translation — keys must mirror the English table above. */
$messages['pt'] = array(
'ajaxcomments-heading' => "Comentários",
'ajaxcomments-comment' => "$1 comentário",
'ajaxcomments-comments' => "$1 comentários",
'ajaxcomments-none' => "Não há comentários sobre esta página",
'ajaxcomments-anon' => "Cadastre-se ou faça login para postar comentários",
'ajaxcomments-sig' => "Postado por $1 às $2",
'ajaxcomments-add' => "Adicionar comentário",
'ajaxcomments-edit' => "Editar",
'ajaxcomments-reply' => "Responder",
'ajaxcomments-del' => "Apagar",
'ajaxcomments-confirmdel' => "Tem certeza de que deseja remover este comentário?",
'ajaxcomments-confirm' => "Confirmar",
'ajaxcomments-yes' => "Sim",
'ajaxcomments-post' => "Postar",
'ajaxcomments-cancel' => "Cancelar",
'ajaxcomments-add-summary' => "Comentário adicionado usando a extensão AjaxComments",
'ajaxcomments-reply-summary' => "Responder comentário feito usando a extensão AjaxComments",
'ajaxcomments-edit-summary' => "Comentário editado usando a extensão AjaxComments",
'ajaxcomments-del-summary' => "Comentário apagado usando a extensão AjaxComments",
'ajaxcomments-talkdeleted' => "Último comentário removido, página de discussão excluído",
'ajaxcomments-like' => "$1 gosta comentário de $2",
'ajaxcomments-dislike' => "$1 não gosta comentário de $2",
'ajaxcomments-unlike' => "$1 já não gosta comentário de $2",
'ajaxcomments-undislike' => "$1 já não não gosta comentário de $2",
'ajaxcomments-nolike' => "Ninguém gosta este comentário",
'ajaxcomments-onelike' => "$1 gosta este comentário",
'ajaxcomments-manylike' => "$1 e $2 gosta este comentário",
'ajaxcomments-nodislike' => "Ninguém não gosta este comentário",
'ajaxcomments-onedislike' => "$1 não gosta este comentário",
'ajaxcomments-manydislike' => "$1 e $2 não gosta este comentário",
);
<file_sep><?php
/**
* EmailPage extension - Send rendered HTML page to an email address or list of addresses using
* the phpmailer class from http://phpmailer.sourceforge.net/
*
* See http://www.mediawiki.org/wiki/Extension:EmailPage for installation and usage details
*
* @file
* @ingroup Extensions
* @author <NAME> [http://www.organicdesign.co.nz/nad User:Nad]
* @copyright © 2007 <NAME>
* @licence GNU General Public Licence 2.0 or later
*/
// Guard against direct web access — this file only makes sense inside MediaWiki.
if( !defined( 'MEDIAWIKI' ) ) die( "Not an entry point." );
define( 'EMAILPAGE_VERSION', "2.3.1, 2014-02-11" );

$wgEmailPageGroup = "sysop"; // Users must belong to this group to send emails (empty string means anyone can send)
$wgEmailPageCss = false; // A minimal CSS page to embed in the email (eg. monobook/main.css without portlets, actions etc)
$wgEmailPageAllowRemoteAddr = array( "127.0.0.1" ); // Allow anonymous sending from these addresses
$wgEmailPageAllowAllUsers = false; // Whether to allow sending to all users (the "user" group)
$wgEmailPageToolboxLink = true; // Add a link to the sidebar toolbox?
$wgEmailPageActionLink = true; // Add a link to the actions links?
$wgEmailPageSepPattern = "|[\\x00-\\x20,;*]+|"; // Regular expression for splitting emails
$wgEmailPageNoLinks = false; // Change links in message to spans if set

// Grant the restriction right (named after the group) to sysops so the special page is reachable.
if( $wgEmailPageGroup ) $wgGroupPermissions['sysop'][$wgEmailPageGroup] = true;
// The server's own address is always trusted for anonymous sending (e.g. cron/CLI-triggered requests).
if( isset( $_SERVER['SERVER_ADDR'] ) ) $wgEmailPageAllowRemoteAddr[] = $_SERVER['SERVER_ADDR'];

// Register the special page class and i18n files.
$dir = dirname( __FILE__ );
$wgAutoloadClasses['SpecialEmailPage'] = "$dir/EmailPage_body.php";
$wgExtensionMessagesFiles['EmailPage'] = "$dir/EmailPage.i18n.php";
$wgExtensionAliasesFiles['EmailPage'] = "$dir/EmailPage.alias.php";
$wgSpecialPages['EmailPage'] = "SpecialEmailPage";
$wgExtensionCredits['specialpage'][] = array(
'path' => __FILE__,
'name' => "EmailPage",
'author' => "[http://www.organicdesign.co.nz/nad User:Nad]",
'descriptionmsg' => "ea-desc",
'url' => "http://www.mediawiki.org/wiki/Extension:EmailPage",
'version' => EMAILPAGE_VERSION
);

// If form has been posted, include the phpmailer class
// (glob is used because the bundled PHPMailer directory name varies by version).
if( isset( $_REQUEST['ea-send'] ) ) {
if( $files = glob( "$dir/*/class.phpmailer.php" ) ) require_once( $files[0] );
else die( "PHPMailer class not found!" );
}

// Add toolbox and action links
if( $wgEmailPageToolboxLink ) $wgHooks['SkinTemplateToolboxEnd'][] = 'wfEmailPageToolboxLink';
if( $wgEmailPageActionLink ) {
$wgHooks['SkinTemplateTabs'][] = 'wfEmailPageActionLink';
$wgHooks['SkinTemplateNavigation'][] = 'wfEmailPageActionLinkVector';
}
/**
 * SkinTemplateToolboxEnd hook handler: print an "E-mail page" item into the
 * sidebar toolbox for logged-in users who are allowed to send
 * (i.e. $wgEmailPageGroup is empty, or the user belongs to it).
 *
 * @return bool always true so subsequent hook handlers still run
 */
function wfEmailPageToolboxLink() {
	global $wgTitle, $wgUser, $wgEmailPageGroup;
	// Guard clauses: need a valid title and a logged-in, authorised user.
	if( !is_object( $wgTitle ) || !$wgUser->isLoggedIn() ) return true;
	if( !empty( $wgEmailPageGroup ) && !in_array( $wgEmailPageGroup, $wgUser->getEffectiveGroups() ) ) return true;
	$query = array( 'ea-title' => $wgTitle->getPrefixedText() );
	$link = htmlspecialchars( SpecialPage::getTitleFor( 'EmailPage' )->getLocalURL( $query ) );
	echo( "<li><a href=\"$link\">" . wfMsg( 'emailpage' ) . "</a></li>" );
	return true;
}
/**
 * SkinTemplateTabs hook handler: add an "email" tab to the page actions
 * (legacy skins) for logged-in users who are allowed to send.
 *
 * @param $skin the current skin (unused)
 * @param $actions array of action tabs, modified in place
 * @return bool always true so subsequent hook handlers still run
 */
function wfEmailPageActionLink( $skin, &$actions ) {
	global $wgTitle, $wgUser, $wgEmailPageGroup;
	// Guard clauses: need a valid title and a logged-in, authorised user.
	if( !is_object( $wgTitle ) || !$wgUser->isLoggedIn() ) return true;
	if( !empty( $wgEmailPageGroup ) && !in_array( $wgEmailPageGroup, $wgUser->getEffectiveGroups() ) ) return true;
	$query = array( 'ea-title' => $wgTitle->getPrefixedText() );
	$actions['email'] = array(
		'text' => wfMsg( 'email' ),
		'class' => false,
		'href' => SpecialPage::getTitleFor( 'EmailPage' )->getLocalURL( $query )
	);
	return true;
}
/**
 * SkinTemplateNavigation hook handler: add an "email" entry under the "views"
 * section (Vector-style skins) for logged-in users who are allowed to send.
 *
 * @param $skin the current skin (unused)
 * @param $actions nested array of navigation links, modified in place
 * @return bool always true so subsequent hook handlers still run
 */
function wfEmailPageActionLinkVector( $skin, &$actions ) {
	global $wgTitle, $wgUser, $wgEmailPageGroup;
	// Guard clauses: need a valid title and a logged-in, authorised user.
	if( !is_object( $wgTitle ) || !$wgUser->isLoggedIn() ) return true;
	if( !empty( $wgEmailPageGroup ) && !in_array( $wgEmailPageGroup, $wgUser->getEffectiveGroups() ) ) return true;
	$query = array( 'ea-title' => $wgTitle->getPrefixedText() );
	$actions['views']['email'] = array(
		'text' => wfMsg( 'email' ),
		'class' => false,
		'href' => SpecialPage::getTitleFor( 'EmailPage' )->getLocalURL( $query )
	);
	return true;
}
<file_sep><?php
if( !defined( 'MEDIAWIKI' ) ) die();
/**
 * Special page that renders a wiki page to HTML and e-mails it via PHPMailer
 * to explicit addresses, a user group, or both. All form state is read from
 * the web request in execute() and stored on the instance for send().
 */
class SpecialEmailPage extends SpecialPage {
// --- Form/request state, populated in execute() ---
var $recipients = array(); // de-duplicated list of validated recipient addresses
var $title; // prefixed text of the wiki page being sent
var $subject; // e-mail subject line
var $message; // optional wikitext message prepended to the page content
var $group; // user group whose members' addresses are added as recipients
var $list; // NOTE(review): declared but never assigned in this class
var $textonly; // truthy = send raw wikitext instead of rendered HTML
var $css; // title of a wiki page whose content is inlined as a <style> block
var $record; // form/template name used for per-recipient {field} substitution
var $db; // slave DB connection, set in execute()
var $parser; // NOTE(review): declared but never assigned in this class
var $args; // field values extracted by replaceFields() for replaceField()
// NOTE(review): $this->from, $this->to, $this->cc and $this->addcomments are
// assigned dynamically in execute() without being declared above.
public function __construct() {
global $wgEmailPageGroup;
// Restrict the special page to $wgEmailPageGroup ('' = unrestricted)
parent::__construct( 'EmailPage', $wgEmailPageGroup );
}
/**
 * Override SpecialPage::execute($param = '')
 *
 * Reads the form fields from the request (falling back to defaults), renders
 * the compose form, and dispatches to send() when the "send" or "show"
 * buttons were clicked.
 *
 * @param $param string subpage parameter, used as the default page title
 */
function execute( $param ) {
global $wgOut, $wgUser, $wgRequest, $wgParser, $wgEmailPageContactsCat, $wgGroupPermissions, $wgSitename,
$wgRecordAdminCategory, $wgEmailPageCss, $wgEmailPageAllowAllUsers, $wgEmergencyContact;
$db = wfGetDB( DB_SLAVE );
// Subpage titles arrive with underscores; convert back to spaces
$param = str_replace( '_', ' ', $param );
$this->setHeaders();
// Get info from request or set to defaults
$this->title = $wgRequest->getText( 'ea-title', $param );
$this->from = $wgRequest->getText( 'ea-from' );
$this->subject = $wgRequest->getText( 'ea-subject', wfMessage( 'ea-pagesend', $this->title, $wgSitename )->text() );
$this->message = $wgRequest->getText( 'ea-message' );
$this->group = $wgRequest->getText( 'ea-group' );
$this->to = $wgRequest->getText( 'ea-to' );
$this->cc = $wgRequest->getText( 'ea-cc' );
$this->textonly = $wgRequest->getText( 'ea-textonly', false );
$this->css = $wgRequest->getText( 'ea-css', $wgEmailPageCss );
$this->record = $wgRequest->getText( 'ea-record', false );
$this->addcomments = $wgRequest->getText( 'ea-addcomments', false );
$this->db = $db;
// Bail if no page title to send has been specified
if( $this->title ) $wgOut->addWikiText( "===" . wfMessage( 'ea-heading', $this->title )->text() . "===" );
else return $wgOut->addWikiText( wfMessage( 'ea-nopage' )->text() );
// If the send button was clicked, attempt to send and exit
if( $wgRequest->getText( 'ea-send', false ) ) return $this->send();
// Render form
$special = SpecialPage::getTitleFor( 'EmailPage' );
$wgOut->addHTML( Xml::element( 'form', array(
'class' => 'EmailPage',
'action' => $special->getLocalURL( 'action=submit' ),
'method' => 'POST'
), null ) );
$wgOut->addHTML( "<table style=\"padding:0;margin:0;border:none;\">" );
// From (dropdown list of self and wiki addresses)
$from = "<option>$wgEmergencyContact</option>";
$ue = $wgUser->getEmail();
// Offer the user's own address first when it is valid
$from = $wgUser->isValidEmailAddr( $ue ) ? "<option>$ue</option>$from" : $from;
$wgOut->addHTML( "<tr id=\"ea-from\"><th align=\"right\">" . wfMessage( 'ea-from' )->text() . ":</th>" );
$wgOut->addHTML( "<td><select name=\"ea-from\">$from</select></td></tr>\n" );
// To
$wgOut->addHTML( "<tr id=\"ea-to\"><th align=\"right\" valign=\"top\">" . wfMessage( 'ea-to' )->text() . ":</th>" );
$wgOut->addHTML( "<td><textarea name=\"ea-to\" rows=\"2\" style=\"width:100%\">{$this->to}</textarea>" );
$wgOut->addHTML( "<br /><small><i>" . wfMessage( 'ea-to-info' )->text() . "</i></small>" );
// To group (every configured group except '*' and 'user')
$groups = "<option />";
foreach( array_keys( $wgGroupPermissions ) as $group ) if( $group != '*' && $group != 'user' ) {
$selected = $group == $this->group ? ' selected' : '';
$groups .= "<option$selected>$group</option>";
}
// The pseudo-group "user" (= everyone) is only offered when explicitly enabled
if( $wgEmailPageAllowAllUsers ) {
$selected = 'user' == $this->group ? ' selected' : '';
$groups .= "<option$selected value=\"user\">" . wfMessage( 'ea-allusers' )->text() . "</option>";
}
$wgOut->addHTML( "<div id=\"ea-group\"><select name=\"ea-group\">$groups</select>" );
$wgOut->addHTML( " <i><small>" . wfMessage( 'ea-group-info' )->text() . "</small></i></div>" );
$wgOut->addHTML( "</td></tr>" );
// Cc (defaults to the sender's own address)
$wgOut->addHTML( "<tr id=\"ea-cc\"><th align=\"right\">" . wfMessage( 'ea-cc' )->text() . ":</th>" );
$wgOut->addHTML( "<td>" .
Xml::element( 'input', array(
'type' => 'text',
'name' => 'ea-cc',
'value' => $this->cc ? $this->cc : $ue,
'style' => "width:100%"
) )
. "</td></tr>" );
// Subject
$wgOut->addHTML( "<tr id=\"ea-subject\"><th align=\"right\">" . wfMessage( 'ea-subject' )->text() . ":</th>" );
$wgOut->addHTML( "<td>" .
Xml::element( 'input', array(
'type' => 'text',
'name' => 'ea-subject',
'value' => $this->subject,
'style' => "width:100%"
) )
. "</td></tr>" );
// Message
$wgOut->addHTML( "<tr id=\"ea-message\"><th align=\"right\" valign=\"top\">" . wfMessage( 'ea-message' )->text() . ":</th>" );
$wgOut->addHTML( "<td><textarea name=\"ea-message\" rows=\"3\" style=\"width:100%\">{$this->message}</textarea>" );
$wgOut->addHTML( "<br /><i><small>" . wfMessage( 'ea-message-info' )->text() . "</small></i></td></tr>" );
// Data: offer pages in the Form namespace (only when a NS_FORM namespace is
// defined, e.g. by the RecordAdmin extension) as field-substitution records
if( defined( 'NS_FORM' ) ) {
$options = "";
$tbl = $db->tableName( 'page' );
$res = $db->select( $tbl, 'page_id', "page_namespace = " . NS_FORM );
while( $row = $db->fetchRow( $res ) ) {
$t = Title::newFromID( $row[0] )->getText();
$selected = $t == $this->record ? ' selected' : '';
$options .= "<option$selected>$t</option>";
}
$db->freeResult( $res );
if( $options ) {
$wgOut->addHTML( "<tr id=\"ea-data\"><th align=\"right\">" . wfMessage( 'ea-data' )->text() . ":</th><td>" );
$wgOut->addHTML( "<select name=\"ea-record\"><option />$options</select>" );
$wgOut->addHTML( " <small><i>" . wfMessage( 'ea-selectrecord' )->text() . "</i></small></td></tr>" );
}
}
// Include comments checkbox (only when the AjaxComments extension is loaded)
if( defined( 'AJAXCOMMENTS_VERSION' ) && AjaxComments::checkTitle( $this->title ) ) {
$wgOut->addHTML( "<tr id=\"ea-addcomments\"><th> </th><td>" );
$wgOut->addHTML( "<input type=\"checkbox\" name=\"ea-addcomments\" /> " . wfMessage( 'ea-addcomments' )->text() . "</td></tr>" );
}
// Submit buttons & hidden values
$wgOut->addHTML( "<tr><td colspan=\"2\" align=\"right\">" );
$wgOut->addHTML( Xml::element( 'input', array( 'type' => 'hidden', 'name' => 'ea-title', 'value' => $this->title ) ) );
$wgOut->addHTML( Xml::element( 'input', array( 'id' => 'ea-show', 'type' => 'submit', 'name' => 'ea-show', 'value' => wfMessage( 'ea-show' )->text() ) ) );
$wgOut->addHTML( "&nbsp;" );
$wgOut->addHTML( Xml::element( 'input', array( 'type' => 'submit', 'name' => 'ea-send', 'value' => wfMessage( 'ea-send' )->text() ) ) . ' ' );
$wgOut->addHTML( "</td></tr>" );
$wgOut->addHTML( "</table></form>" );
// If the show button was clicked, render the list
if( isset( $_REQUEST['ea-show'] ) ) return $this->send( false );
}
/**
 * Send the message to the recipients (or just list them if arg = false)
 *
 * Collects recipients from the selected group and the to/cc fields, renders
 * the page (wikitext or full HTML document), then either sends one message
 * per recipient or outputs the recipient list.
 *
 * @param $send bool true = actually send, false = only list recipients
 * @return mixed last PHPMailer send status when sending, recipient count otherwise
 *         NOTE(review): $state is undefined when sending with zero recipients.
 */
function send( $send = true ) {
global $wgOut, $wgUser, $wgParser, $wgServer, $wgScript, $wgArticlePath, $wgScriptPath, $wgEmergencyContact,
$wgEmailPageCss, $wgEmailPageGroup, $wgEmailPageAllowRemoteAddr, $wgEmailPageAllowAllUsers, $wgEmailPageSepPattern,
$wgEmailPageNoLinks;
// Set error and bail if user not in postmaster group, and request not from trusted address
if( $wgEmailPageGroup && !in_array( $wgEmailPageGroup, $wgUser->getGroups() )
&& !in_array( $_SERVER['REMOTE_ADDR'], $wgEmailPageAllowRemoteAddr ) ) {
$denied = wfMessage( 'ea-denied' )->text();
$wgOut->addWikiText( wfMessage( 'ea-error', $this->title, $denied )->text() );
return false;
}
// Get email addresses from users in selected group
// ("user" means all users with a non-empty e-mail, gated by $wgEmailPageAllowAllUsers)
$db = $this->db;
if( $this->group && ( $wgEmailPageAllowAllUsers || $this->group != 'user' ) ) {
$group = $db->addQuotes( $this->group );
$res = $this->group == 'user'
? $db->select( 'user', 'user_email', 'user_email != \'\'', __METHOD__ )
: $db->select( array( 'user', 'user_groups' ), 'user_email', "ug_user = user_id AND ug_group = $group", __METHOD__ );
foreach( $res as $row ) $this->addRecipient( $row->user_email );
}
// Recipients from the "to" and "cc" fields
foreach( preg_split( $wgEmailPageSepPattern, $this->to ) as $item ) $this->addRecipient( $item );
foreach( preg_split( $wgEmailPageSepPattern, $this->cc ) as $item ) $this->addRecipient( $item );
// Compose the wikitext content of the page to send
$title = Title::newFromText( $this->title );
$opt = new ParserOptions;
$page = new Article( $title );
$message = $page->getContent();
if( $this->message ) $message = "{$this->message}\n\n$message";
// Convert the message text to html unless textonly
if( $this->textonly == '' ) {
// Parse the wikitext using absolute URL's for local page links
// (the path globals are temporarily prefixed with $wgServer, then restored)
$tmp = array( $wgArticlePath, $wgScriptPath, $wgScript );
$wgArticlePath = $wgServer . $wgArticlePath;
$wgScriptPath = $wgServer . $wgScriptPath;
$wgScript = $wgServer . $wgScript;
$message = $wgParser->parse( $message, $title, $opt, true, true )->getText();
list( $wgArticlePath, $wgScriptPath, $wgScript ) = $tmp;
// If add comments is set append them to the message now
if( $this->addcomments ) {
global $wgAjaxComments;
$article = new Article( Title::newFromText( $this->title ) );
$message .= $wgAjaxComments->onUnknownAction( 'ajaxcommentsinternal', $article );
}
// If no links allowed in message, change them all to spans
// NOTE(review): the replacement turns <a ...> into <u, relying on the
// following ">" from the original markup to close the tag
if( $wgEmailPageNoLinks ) $message = preg_replace( "|(</?)a([^>]*)|i", "$1u", $message );
// Get CSS content if any
if( $this->css ) {
$page = new Article( Title::newFromText( $this->css ) );
$css = "<style type='text/css'>" . $page->getContent() . "</style>";
} else $css = '';
// Create a html wrapper for the message
$doctype = '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">';
$head = "<head>$css</head>";
$message = "$doctype\n<html>$head<body style=\"margin:10px\"><div id=\"bodyContent\">$message</div></body></html>";
}
// Send message or list recipients
$count = count( $this->recipients );
if( $count > 0 ) {
// Set up new mailer instance if sending
if( $send ) {
$mail = new PHPMailer();
$mail->From = $this->from;
$mail->FromName = User::whoIsReal( $wgUser->getId() );
$mail->Subject = $this->subject;
$mail->Body = $message;
$mail->IsHTML( !$this->textonly );
}
else $msg = "===" . wfMessage( 'ea-listrecipients', $count )->text() . "===";
// Loop through recipients sending or adding to list
// (one message per recipient so {field} substitution can be personalised)
foreach( $this->recipients as $recipient ) {
$error = '';
if( $send ) {
if( $this->record ) $mail->Body = $this->replaceFields( $message, $recipient );
$mail->AddAddress( $recipient );
if( $state = $mail->Send() ) $msg = wfMessage( 'ea-sent', $this->title, $count, $wgUser->getName() )->text();
else $error .= "Couldn't send to $recipient: {$mail->ErrorInfo}<br />\n";
$mail->ClearAddresses();
} else $msg .= "\n*[mailto:$recipient $recipient]";
if( $error ) $msg = wfMessage( 'ea-error', $this->title, $error )->text();
}
}
// NOTE(review): the second parameter is a Message object, not ->text()
else $msg = wfMessage( 'ea-error', $this->title, wfMessage( 'ea-norecipients' ) )->text();
$wgOut->addWikiText( $msg );
return $send ? $state : $count;
}
/**
 * Add a recipient the list if not already present
 *
 * @param $recipient string candidate e-mail address
 * @return bool whether the address was valid and not already listed
 *         (PHP parses this as $valid = ( isValid && !in_array ))
 */
function addRecipient( $recipient ) {
if( $valid = User::isValidEmailAddr( $recipient ) && !in_array( $recipient, $this->recipients ) ) {
$this->recipients[] = $recipient;
}
return $valid;
}
/**
 * Replace fields in message (enclosed in single braces)
 * - fields can have a default value, eg {name|default}
 *
 * Scans pages transcluding the selected record template for one whose
 * content contains the recipient's e-mail address, extracts its template
 * arguments into $this->args, then substitutes {field} tokens in $text.
 *
 * @param $text string message body containing {field} tokens
 * @param $email string recipient address used to locate the matching record
 * @return string message with fields substituted (unchanged if no record matches)
 */
function replaceFields( $text, $email ) {
// Scan all records of this type for the first containing matching email address
$dbr = $this->db;
$tbl = $dbr->tableName( 'templatelinks' );
$type = $dbr->addQuotes( $this->record );
// tl_namespace = 10 is the Template namespace
$res = $dbr->select( $tbl, 'tl_from', "tl_namespace = 10 AND tl_title = $type", __METHOD__ );
while( $row = $dbr->fetchRow( $res ) ) {
$a = new Article( Title::newFromID( $row[0] ) );
$c = $a->getContent();
// Check if this records email address matches
if( preg_match( "|\s*\|\s*\w+\s*=\s*$email\s*(?=[\|\}])|s", $c ) ) {
// Extract all the fields from the content (should use examineBraces here)
$this->args = array();
preg_match_all( "|\|\s*(.+?)\s*=\s*(.*?)\s*(?=[\|\}])|s", $c, $m );
foreach( $m[1] as $i => $k ) $this->args[strtolower( $k )] = $m[2][$i];
// Replace any fields in the message text with our extracted args (should use wiki parser for this)
$text = preg_replace_callback( "|\{(\w+)(\|(.+?))?\}|s", array( $this, 'replaceField' ), $text );
break;
}
}
$dbr->freeResult( $res );
return $text;
}
/**
 * Replace a single field
 *
 * preg_replace_callback callback: $match[1] is the field name, $match[3]
 * the optional default. Unknown fields without a default are left as-is.
 */
function replaceField( $match ) {
$key = strtolower( $match[1] );
$default = isset( $match[3] ) ? $match[3] : false;
if( array_key_exists( $key, $this->args ) ) $replace = $this->args[$key];
else $replace = $default ? $default : $match[0];
return $replace;
}
}
<file_sep><?php
/**
* Internationalisation file for extension EmailPage.
*
* @file
* @ingroup Extensions
*/
// Message table for the EmailPage extension, keyed by language code.
// Values are MediaWiki message strings; $1, $2 ... are positional message
// parameters, and {{PLURAL:...}} is resolved by the message system.
// The 'qqq' pseudo-language holds message documentation for translators.
$messages = array();
/** English
 * @author Nad
 */
$messages['en'] = array(
'emailpage' => "E-mail page",
'ea-desc' => "Send rendered HTML page to an e-mail address or list of addresses using [http://phpmailer.sourceforge.net phpmailer]",
'ea-heading' => "E-mailing the page [[$1]]",
'ea-group-info' => "Additionally you can send the page to the members of a group",
'ea-pagesend' => "Page \"$1\" sent from $2",
'ea-nopage' => "No page was specified for sending, please use the E-mail links from the sidebar or page actions.",
'ea-norecipients' => "No valid e-mail addresses found!",
'ea-listrecipients' => "Listing {{PLURAL:$1|recipient|$1 recipients}}",
'ea-error' => "'''Error sending [[$1]]:''' ''$2''",
'ea-denied' => "Permission denied",
'ea-sent' => "Page [[$1]] sent successfully to '''$2''' {{PLURAL:$2|recipient|recipients}} by [[{{ns:User}}:$3|$3]].",
'ea-compose' => "Compose content",
'ea-show' => "View recipient list",
'ea-from' => "From",
'ea-to' => "To",
'ea-to-info' => "Email addresses can be separated with one or more semicolon, comma, asterisk or newline characters",
'ea-cc' => "Cc",
'ea-send' => "Send",
'ea-subject' => "Subject",
'ea-message' => "Message",
'ea-message-info' => "Prepend content with optional wikitext message",
'ea-style' => "Style",
'ea-selectcss' => "You can select a CSS stylesheet",
'ea-data' => "Data",
'ea-selectrecord' => "These templates can be used to fill in fields in the message content",
'ea-allusers' => "ALL USERS",
'ea-addcomments' => "Include user comments?"
);
/** Portuguese
 * @author Beth
 */
$messages['pt'] = array(
'emailpage' => "Enviar página por e-mail",
'ea-desc' => "Enviar página HTML renderizado para um endereço de e-mail ou uma lista de endereços usando [http://phpmailer.sourceforge.net phpmailer]",
'ea-heading' => "Enviando a página [[$1]]",
'ea-group-info' => "Você também pode enviar a página para os membros de um grupo",
'ea-pagesend' => "Página \"$1\" enviado por $2",
'ea-nopage' => "Nenhuma página foi especificado para o envio, por favor, use os links de e-mail do menu lateral ou os ações de página.",
'ea-norecipients' => "Nenhum endereço de e-mail válido encontrado!",
'ea-listrecipients' => "{{PLURAL:$1|Lista 1 destinatário|Listagem $1 destinatários}}",
'ea-error' => "'''Erro de envio de [[$1]]:''' ''$2''",
'ea-denied' => "Permissão negada",
'ea-sent' => "Página [[$1]] enviada com sucesso para $2 {{PLURAL:$2|destinatário|destinatários}} por [[{{ns:User}}:$3|$3]].",
'ea-compose' => "Compor conteúdo",
'ea-show' => "Ver lista de destinatários",
'ea-from' => "De",
'ea-to' => "Para",
'ea-to-info' => "Endereços de e-mail podem ser separados por um ou mais ponto e vírgula, vírgula, asterisco ou caracteres de nova linha",
'ea-cc' => "Cc",
'ea-send' => "Enviar",
'ea-subject' => "Assunto",
'ea-message' => "Mensagem",
'ea-message-info' => "Anteceder conteúdo com mensagem wikitext opcional",
'ea-style' => "Estilo",
'ea-selectcss' => "Você pode selecionar uma folha de estilo CSS",
'ea-data' => "Dados",
'ea-selectrecord' => "Estes modelos podem ser utilizados para preencher os campos no conteúdo da mensagem",
'ea-allusers' => "TODOS OS USUÁRIOS",
'ea-addcomments' => "Incluir comentários de usuários?"
);
/** Message documentation (Message documentation)
 * @author <NAME>
 */
$messages['qqq'] = array(
'ea-send' => '{{Identical|Send}}',
);
/** Arabic (العربية)
 * @author Meno25
 * @author OsamaK
 */
$messages['ar'] = array(
'emailpage' => 'إرسال رسالة للمستخدم',
'ea-desc' => 'يرسل صفحة ناتجة ب HTML لعنوان بريد إلكتروني أو قائمة عناوين باستخدام [http://phpmailer.sourceforge.net phpmailer].',
'ea-heading' => '=== مراسلة الصفحة [[$1]] ===',
'ea-fromgroup' => 'من مجموعة:',
'ea-pagesend' => 'الصفحة "$1" أُرسلت من $2',
'ea-nopage' => 'من فضلك حدد صفحة للإرسال، على سبيل المثال "[[Special:EmailPage/{{MediaWiki:Mainpage-url}}]]".',
'ea-norecipients' => 'لم يتم إيجاد عناوين بريد إلكتروني صحيحة!',
'ea-listrecipients' => '=== {{PLURAL:$1|متلقي|$1 متلقي}} ===',
'ea-error' => "'''خطأ في إرسال [[$1]]:''' ''$2''",
'ea-denied' => 'السماح مرفوض',
'ea-sent' => "الصفحة [[$1]] تم إرسالها بنجاح إلى '''$2''' {{PLURAL:$2|متلق|متلق}} بواسطة [[{{ns:User}}:$3|$3]].",
'ea-selectrecipients' => 'اختر المتلقين',
'ea-compose' => 'كتابة المحتوى',
'ea-selectlist' => "متلقون إضافيون كعناوين صفحات أو عناوين بريد إلكتروني
*''افصل المدخلات ب , ; * \\n
*''القائمة يمكن أن تحتوي على قوالب ودوال بارسر''",
'ea-show' => 'عرض المتلقين',
'ea-send' => 'إرسال!',
'ea-subject' => 'أدخل سطر عنوان للبريد الإلكتروني',
'ea-message' => 'إرسال المحتوى برسالة اختيارية (نص ويكي)',
'ea-selectcss' => 'اختر شريحة CSS',
);
/** Egyptian Spoken Arabic (مصرى)
 * @author Meno25
 */
$messages['arz'] = array(
'emailpage' => 'إرسال رسالة للمستخدم',
'ea-desc' => 'يرسل صفحة ناتجة ب HTML لعنوان بريد إلكترونى أو قائمة عناوين باستخدام [http://phpmailer.sourceforge.net phpmailer].',
'ea-heading' => '=== مراسلة الصفحة [[$1]] ===',
'ea-fromgroup' => 'من مجموعة:',
'ea-pagesend' => 'الصفحة "$1" أُرسلت من $2',
'ea-nopage' => 'من فضلك حدد صفحة للإرسال، على سبيل المثال "[[Special:EmailPage/{{MediaWiki:Mainpage-url}}]]".',
'ea-norecipients' => 'لم يتم إيجاد عناوين بريد إلكترونى صحيحة!',
'ea-listrecipients' => '=== {{PLURAL:$1|متلقي|$1 متلقي}} ===',
'ea-error' => "'''خطأ في إرسال [[$1]]:''' ''$2''",
'ea-denied' => 'السماح مرفوض',
'ea-sent' => "الصفحة [[$1]] تم إرسالها بنجاح إلى '''$2''' {{PLURAL:$2|متلق|متلق}} بواسطة [[{{ns:User}}:$3|$3]].",
'ea-selectrecipients' => 'اختر المتلقين',
'ea-compose' => 'كتابة المحتوى',
'ea-selectlist' => "متلقون إضافيون كعناوين صفحات أو عناوين بريد إلكترونى
*''افصل المدخلات ب , ; * \\n
*''القائمة يمكن أن تحتوي على قوالب ودوال بارسر''",
'ea-show' => 'عرض المتلقين',
'ea-send' => 'إرسال!',
'ea-subject' => 'أدخل سطر عنوان للبريد الإلكتروني',
'ea-message' => 'إرسال المحتوى برسالة اختيارية (نص ويكى)',
'ea-selectcss' => 'اختر شريحة CSS',
);
/** Bulgarian (Български)
 * @author DCLXVI
 */
$messages['bg'] = array(
'ea-fromgroup' => 'От група:',
'ea-nopage' => 'Необходимо е да се посочи страница, която да бъде изпратена, напр. [[Special:EmailPage/Начална страница]].',
'ea-norecipients' => 'Не бяха намерени валидни адреси за е-поща!',
'ea-listrecipients' => '=== Списък на $1 {{PLURAL:$1|получател|получателя}} ===',
'ea-error' => "'''Грешка при изпращане на [[$1]]:''' ''$2''",
'ea-selectrecipients' => 'Избиране на получатели',
'ea-show' => 'Показване на получателите',
'ea-send' => 'Изпращане!',
'ea-selectcss' => 'Избиране на CSS стил',
);
/** German (Deutsch)
 * @author Church of emacs
 * @author Leithian
 * @author kghbln
 */
$messages['de'] = array(
'emailpage' => "Per E-Mail versenden",
'ea-desc' => "Nutzt [http://phpmailer.worxware.com/ PHPMailer], um Seiten an E-Mail-Adressen oder E-Mail-Adresslisten zu versenden",
'ea-heading' => "E-Mail-Versand von Seite „[[$1]]“",
'ea-group-info' => "Die Seite kann zusätzlich an die Mitglieder einer Benutzergruppe gesandt werden.",
'ea-pagesend' => "Seite „$1“ (aus dem Wiki „$2“)",
'ea-nopage' => "Es wurde keine Seite zum Versenden ausgewählt. Bitte hierzu entweder den Link in der Seitenleiste oder im Aktionsmenü nutzen.",
'ea-norecipients' => "Es sind keine E-Mail-Adressen zum Versenden vorhanden!",
'ea-listrecipients' => "Der E-Mail-Versand erfolgt an {{PLURAL:$1|den folgenden Empfänger|die folgenden $1 Empfänger}}:",
'ea-error' => "'''Fehler beim Versenden von Seite „[[$1]]“:''' ''$2''",
'ea-denied' => "Der E-Mail-Versand konnte aufgrund fehlender Berechtigung nicht durchgeführt werden.",
'ea-sent' => "Die Seite „[[$1]]“ wurde erfolgreich von [[{{ns:User}}:$3|$3]] an {{PLURAL:$2|einen Empfänger|$2 Empfänger}} versandt.",
'ea-compose' => "Nachricht erstellen",
'ea-show' => "Empfängerliste anzeigen",
'ea-send' => "Seite versenden",
'ea-from' => "Von",
'ea-to' => "An",
'ea-to-info' => "E-Mail-Adressen können mit einem Semikolon, einem Komma, einem Sternchen oder einer neuen Zeile voneinander getrennt werden.",
'ea-cc' => "CC",
'ea-subject' => "Betreff",
'ea-message' => "Nachricht",
'ea-message-info' => "Der Seite eine optionale Nachricht voranstellen (in Wikitext).",
'ea-style' => "Formatierungen",
'ea-selectcss' => "CSS-Stylesheet auswählen",
'ea-data' => "Daten",
'ea-selectrecord' => "Diese Vorlagen können dazu genutzt werden die Felder im Nachrichtentext zu befüllen.",
'ea-allusers' => "Alle Benutzer",
'ea-addcomments' => "Benutzerkommentare einbeziehen?"
);
/** Esperanto (Esperanto)
 * @author Yekrats
 */
$messages['eo'] = array(
'emailpage' => 'Retpoŝtigi paĝon',
'ea-heading' => '=== Retpoŝtigante paĝon [[$1]] ===',
'ea-fromgroup' => 'De grupo:',
'ea-pagesend' => 'Paĝo "$1" sendita de $2',
'ea-nopage' => 'Bonvolu enigi paĝon por retsendi, ekz-e [[Special:EmailPage/Main Page]].',
'ea-norecipients' => 'Neniaj validaj retadresoj trovitaj!',
'ea-listrecipients' => '=== Listo de $1 {{PLURAL:$1|ricevonto|ricevontoj}} ===',
'ea-error' => "'''Eraro sendante [[$1]]:''' ''$2''",
'ea-denied' => 'Malpermesite',
'ea-sent' => "Paĝo [[$1]] sendita sukcese al '''$2''' {{PLURAL:$2|ricevonto|ricevontoj}} de [[{{ns:User}}:$3|$3]].",
'ea-selectrecipients' => 'Selektu ricevontojn',
'ea-compose' => 'Skribu enhavon',
'ea-send' => 'Sendi!',
'ea-selectcss' => 'Selekti CSS-tiparŝablono',
);
/** French (Français)
 * @author Grondin
 */
$messages['fr'] = array(
'emailpage' => 'Envoyer l’article par courriel',
'ea-desc' => 'Envoie le rendu d’une page HTML à une adresse électronique où à une liste d’adresses en utilisant [http://phpmailer.sourceforge.net phpmailer]',
'ea-heading' => '=== Envoi de la page [[$1]] par courrier électronique ===',
'ea-fromgroup' => 'Depuis le groupe :',
'ea-pagesend' => 'Page « $1 » envoyée depuis $2',
'ea-nopage' => 'Veuillez spécifier une page à envoyer, par exemple [[Special:EmailPage/{{MediaWiki:Mainpage-url}}]]',
'ea-norecipients' => 'Aucune adresse courriel de trouvée !',
'ea-listrecipients' => '=== Liste de $1 {{PLURAL:$1|destinataire|destinataires}} ===',
'ea-error' => "'''Erreur de l’envoi de [[$1]] :''' ''$2''",
'ea-denied' => 'Permission refusée',
'ea-sent' => "L'article [[$1]] a été envoyé avec succès à '''$2''' {{PLURAL:$2|destinataire|destinataires}} par [[{{ns:User}}:$3|$3]].",
'ea-selectrecipients' => 'Sélectionner les destinataires',
'ea-compose' => 'Composer le contenu',
'ea-selectlist' => "Destinataires supplémentaires comme les titres d'articles ou les adresses courriel
* ''séparer les articles avec , : * \\n''
* ''la liste peut contenir des modèles et des fonctions parseurs''",
'ea-show' => 'Visionner les destinataires',
'ea-send' => 'Envoyer !',
'ea-subject' => 'Entrer une ligne « objet » pour le courriel',
'ea-message' => 'Ajouter le contenu au début avec un message facultatif (texte wiki)',
'ea-selectcss' => 'Sélectionner une feuille de style CSS',
);
/** Galician (Galego)
 * @author Toliño
 */
$messages['gl'] = array(
'emailpage' => 'Enviar a páxina por correo electrónico',
'ea-desc' => 'Enviar páxinas HTML renderizadas a un enderezo de correo electrónico (ou a varios correos) usando [http://phpmailer.sourceforge.net phpmailer].',
'ea-heading' => '=== Enviando a páxina "[[$1]]" ===',
'ea-fromgroup' => 'Desde o grupo:',
'ea-pagesend' => 'O artigo "$1" foi enviado desde $2',
'ea-nopage' => 'Por favor, especifique a páxina que quere enviar, por exemplo: [[Special:EmailPage/{{MediaWiki:Mainpage-url}}]].',
'ea-norecipients' => 'Non foi atopado ningún enderezo de correo electrónico válido!',
'ea-listrecipients' => '=== {{PLURAL:$1|Nome do destinatario|Listaxe dos $1 destinatarios}} ===',
'ea-error' => "'''Erro no envío de \"[[\$1]]\":''' ''\$2''",
'ea-denied' => 'Permiso denegado',
'ea-sent' => 'A páxina "[[$1]]" foi enviada con éxito a \'\'\'$2\'\'\' {{PLURAL:$2|destinatario|destinatarios}} por [[{{ns:User}}:$3|$3]].',
'ea-selectrecipients' => 'Seleccionar os destinatarios',
'ea-compose' => 'Compoñer o contido',
'ea-selectlist' => "Destinatarios adicionais como títulos de páxinas ou correos electrónicos
*''separar os ítems con , ; * \\n
*''a listaxe`pode conter modelos e funcións analíticas''",
'ea-show' => 'Amosar os destinatarios',
'ea-send' => 'Enviar!',
'ea-subject' => 'Introducir un asunto ao correo electrónico',
'ea-message' => 'Engadir o contido cunha mensaxe opcional (texto wiki)',
'ea-selectcss' => 'Seleccionar unha folla de estilo CSS',
);
/** Interlingua (Interlingua)
 * @author McDutchie
 */
$messages['ia'] = array(
'emailpage' => 'Inviar pagina per e-mail',
'ea-desc' => 'Inviar le rendition HTML de un pagina a un adresse de e-mail o a un lista de adresses con [http://phpmailer.sourceforge.net phpmailer].',
'ea-heading' => '=== Invio del pagina [[$1]] per e-mail ===',
'ea-fromgroup' => 'Del gruppo:',
'ea-pagesend' => 'Pagina "$1" inviate ab $2',
'ea-nopage' => 'Per favor specifica un pagina a inviar, per exemplo "[[Special:EmailPage/{{MediaWiki:Mainpage-url}}]]".',
'ea-norecipients' => 'Nulle adresses de e-mail valide trovate!',
'ea-listrecipients' => '=== {{PLURAL:$1|Destinatario|$1 destinatarios}} ===',
'ea-error' => "'''Error durante le invio de [[$1]]:''' ''$2''",
'ea-denied' => 'Permission refusate',
'ea-sent' => "Le pagina [[$1]] ha essite inviate con successo a '''$2''' {{PLURAL:$2|destinatario|destinatarios}} per [[{{ns:User}}:$3|$3]].",
'ea-selectrecipients' => 'Seliger destinatarios',
'ea-compose' => 'Componer contento',
'ea-selectlist' => "Adde destinatarios como titulos de paginas o adresses de e-mail
*''separa le entratas con , ; * \\n
*''le lista pote continer patronos e functiones del analysator syntactic''",
'ea-show' => 'Monstrar destinatarios',
'ea-send' => 'Inviar!',
'ea-subject' => 'Entra un linea de subjecto pro le message de e-mail',
'ea-message' => 'Adjunger le contento al initio con un message facultative (texto wiki)',
'ea-selectcss' => 'Selige un folio de stilos CSS',
);
/** Khmer (ភាសាខ្មែរ)
 * @author Lovekhmer
 */
$messages['km'] = array(
'emailpage' => 'ទំព័រអ៊ីមែល',
'ea-fromgroup' => 'ពីក្រុម:',
'ea-pagesend' => 'ទំព័រ"$1"ត្រូវបានបញ្ជូនពី$2',
'ea-send' => 'ផ្ញើ!',
);
/** Luxembourgish (Lëtzebuergesch)
* @author Robby
*/
$messages['lb'] = array(
'emailpage' => 'Säit per Mail schécken',
'ea-heading' => '=== Säit [[$1]] per E-Mail verschécken ===',
'ea-fromgroup' => 'Vun der Grupp:',
'ea-pagesend' => 'D\'Säit "$1" gouf verschéckt vum $2',
'ea-norecipients' => 'Keng gëlteg E-Mailadress fonnt',
'ea-denied' => 'Rechter refuséiert',
'ea-selectrecipients' => 'Adressaten erauswielen',
'ea-show' => 'Adressate weisen',
'ea-send' => 'Schécken!',
'ea-subject' => "Gitt w.e.g. e Sujet fir d'E-Mail an",
'ea-selectcss' => "Een ''CSS Stylesheet'' auswielen",
);
/** Malayalam (മലയാളം)
* @author Shijualex
*/
$messages['ml'] = array(
'emailpage' => 'ഇമെയില് താള്',
'ea-heading' => '=== [[$1]] എന്ന താള് ഇമെയില് ചെയ്യുന്നു ===',
'ea-fromgroup' => 'ഗ്രൂപ്പ്:',
'ea-pagesend' => '$2 സംരംഭത്തില് നിന്നു "$1" എന്ന താള് അയച്ചു',
'ea-nopage' => 'അയക്കുവാന് വേണ്ടി ഒരു താള് തിരഞ്ഞെടുക്കുക. ഉദാ: [[Special:EmailPage/Main Page]]',
'ea-norecipients' => 'സാധുവായ ഇമെയില് വിലാസങ്ങള് കണ്ടില്ല!',
'ea-listrecipients' => '=== $1 {{PLURAL:$1|സ്വീകര്ത്താവിന്റെ|സ്വീകര്ത്താക്കളുടെ}} പട്ടിക ===',
'ea-error' => "'''[[$1]] അയക്കുന്നതില് പിഴവ്:''' ''$2''",
'ea-denied' => 'അനുവാദം നിഷേധിച്ചിരിക്കുന്നു',
'ea-sent' => "[[{{ns:User}}:$3|$3]] എന്ന ഉപയോക്താവ് [[$1]] എന്ന താള് വിജയകരമായി '''$2''' {{PLURAL:$2|സ്വീകര്ത്താവിനു|സ്വീകര്ത്താക്കള്ക്ക്}} അയച്ചിരിക്കുന്നു.",
'ea-selectrecipients' => 'സ്വീകര്ത്താക്കളെ തിരഞ്ഞെടുക്കുക',
'ea-compose' => 'ഉള്ളടക്കം ചേര്ക്കുക',
'ea-show' => 'സ്വീകര്ത്താക്കളെ പ്രദര്ശിപ്പിക്കുക',
'ea-send' => 'അയക്കൂ!',
'ea-subject' => 'ഇമെയിലിനു ഒരു വിഷയം/ശീര്ഷകം ചേര്ക്കുക',
);
/** Marathi (मराठी)
* @author Kaustubh
*/
$messages['mr'] = array(
'emailpage' => 'पान इ-मेल करा',
'ea-desc' => ' [http://phpmailer.sourceforge.net पीएचपी मेलर] चा वापर करून एखादे पान एखाद्या इ-मेल पत्त्यावर किंवा इ-मेल पत्त्यांच्या यादीवर पाठवा.',
'ea-heading' => '=== [[$1]] पान इ-मेल करीत आहे ===',
'ea-fromgroup' => 'गटाकडून:',
'ea-pagesend' => '$2 ने "$1" पान पाठविले',
'ea-nopage' => 'कृपया पाठविण्यासाठी एक पान निवडा, उदाहरणासाठी [[Special:EmailPage/Main Page]].',
'ea-norecipients' => 'योग्य इ-मेल पत्ता सापडला नाही!',
'ea-listrecipients' => '=== $1 {{PLURAL:$1|सदस्याची|सदस्यांची}}यादी ===',
'ea-error' => "'''पाठविण्यामध्ये त्रुटी [[$1]]:''' ''$2''",
'ea-denied' => 'परवानगी नाकारली',
'ea-sent' => "[[{{ns:User}}:$3|$3]] ने [[$1]] पान '''$2''' {{PLURAL:$2|सदस्याला|सदस्यांना}} पाठविले.",
'ea-selectrecipients' => 'सदस्य निवडा',
'ea-compose' => 'मजकूर लिहा',
'ea-selectlist' => "जास्तीचे सदस्य लेख शीर्षक किंवा इ-मेल पत्ता
*'', ; चा वापर करून वेगळे करा* \\n
*''यादी मध्ये साचे तसेच पार्सर क्रिया वापरता येतील''",
'ea-show' => 'निवडलेले सदस्य दाखवा',
'ea-send' => 'पाठवा!',
'ea-subject' => 'इ-मेल चा विषय लिहा',
'ea-message' => 'मजकूरा आधी वैकल्पिक संदेश लिहा (विकिसंज्ञा)',
'ea-selectcss' => 'सीएसएस स्टाइलशीट पाठवा',
);
/** Nahuatl (Nāhuatl)
* @author Fluence
*/
$messages['nah'] = array(
'emailpage' => 'E-mail zāzanilli',
'ea-heading' => '=== E-mailhua in zāzanilli $1 ===',
);
/** Low German (Plattdüütsch)
* @author Slomox
*/
$messages['nds'] = array(
'ea-fromgroup' => 'Vun Grupp:',
);
/** Dutch (Nederlands)
* @author Siebrand
*/
$messages['nl'] = array(
'emailpage' => 'Pagina e-mailen',
'ea-desc' => 'Stuur een gerenderde pagina naar een e-mailadres of een lijst van adressen met behulp van [http://phpmailer.sourceforge.net phpmailer].',
'ea-heading' => '=== Pagina [[$1]] e-mailen ===',
'ea-fromgroup' => 'Van groep:',
'ea-pagesend' => 'Pagina "$1" is vanuit $2 verstuurd',
'ea-nopage' => 'Geef een pagina op om te versturen, bijvoorbeeld [[Special:EmailPage/Hoofdpagina]].',
'ea-norecipients' => 'Er is geen geldig e-mailadres opgegeven!',
'ea-listrecipients' => '=== Lijst met $1 {{PLURAL:$1|ontvanger|ontvangers}} ===',
'ea-error' => "'''Fout bij het versturen van [[$1]]:''' ''$2''",
'ea-denied' => 'U hebt geen rechten om deze handeling uit te voeren',
'ea-sent' => "Pagina [[$1]] is verstuurd naar '''$2''' {{PLURAL:$2|ontvanger|ontvangers}} door [[{{ns:User}}:$3|$3]].",
'ea-selectrecipients' => 'Ontvangers selecteren',
'ea-compose' => 'Inhoud samenstellen',
'ea-selectlist' => 'Meer ontvangers als paginanamen of e-mailadressen
*\'\'u kunt adressen scheiden met ",", ";", "*", of "\\n"
*\'\'de lijst mag sjablonen en parserfuncties bevatten\'\'',
'ea-show' => 'Ontvangers weergeven',
'ea-send' => 'Versturen',
'ea-subject' => 'Voer een onderwerp in voor de e-mail',
'ea-message' => 'Laat de pagina-inhoud vooraf gaan door een bericht (in wikitekst)',
'ea-selectcss' => 'Selecteer een CSS',
);
/** Norwegian (bokmål) (Norsk (bokmål))
* @author <NAME>
*/
$messages['no'] = array(
'emailpage' => 'Send side som e-post',
'ea-desc' => 'Send HTML-side til en eller flere e-postadresser ved hjelp av [http://phpmailer.sourceforge.net/ phpmailer].',
'ea-heading' => '=== Send siden [[$1]] som e-post ===',
'ea-fromgroup' => 'Fra gruppe:',
'ea-pagesend' => 'Siden «$1» sendt fra $2',
'ea-nopage' => 'Oppgi en side du vil sende, for eksempel [[Special:EmailPage/{{MediaWiki:Mainpage-url}}]].',
'ea-norecipients' => 'Ingen gyldige e-postadresser funnet.',
'ea-listrecipients' => '=== Liste over $1 {{PLURAL:$1|mottaker|mottakere}} ===',
'ea-error' => "'''Feil under sending av [[$1]]:''' ''$2''",
'ea-denied' => 'Ingen adgang',
'ea-sent' => "Siden [[$1]] ble sendt til '''$2''' {{PLURAL:$2|mottaker|mottakere}} av [[{{ns:User}}:$3|$3]].",
'ea-selectrecipients' => 'Velg mottakere',
'ea-compose' => 'Skriv inn innhold',
'ea-selectlist' => "Ytterligere mottakere som sidetitler eller e-postadresser
* ''skill elementer med , ; * \\n
* ''listen kan inneholde maler og parserfunksjoner''",
'ea-show' => 'Vis mottakere',
'ea-send' => 'Send',
'ea-subject' => 'Skriv inn et emne for e-posten',
'ea-message' => 'Fyll innholdet med en valgfri beskjed (wikitekst)',
'ea-selectcss' => 'Angi en CSS-stilmal',
);
/** Occitan (Occitan)
* @author Cedric31
*/
$messages['oc'] = array(
'emailpage' => 'Mandar l’article per corrièr electronic',
'ea-desc' => 'Manda lo rendut d’una pagina HTML a una adreça electronica o a una tièra d’adreças en utilizant [http://phpmailer.sourceforge.net phpmailer]',
'ea-heading' => '=== Mandadís de la pagina [[$1]] per corrièr electronic ===',
'ea-fromgroup' => 'Dempuèi lo grop :',
'ea-pagesend' => 'Pagina « $1 » mandada dempuèi $2',
'ea-nopage' => 'Especificatz una pagina de mandar, per exemple [[Special:EmailPage/Acuèlh]]',
'ea-norecipients' => "Cap d'adreça de corrièr electronic pas trobada !",
'ea-listrecipients' => '=== Tièra de $1 {{PLURAL:$1|destinatari|destinataris}} ===',
'ea-error' => "'''Error del mandadís de [[$1]] :''' ''$2''",
'ea-denied' => 'Permission refusada',
'ea-sent' => "L'article [[$1]] es estat mandat amb succès a '''$2''' {{PLURAL:$2|destinatari|destinataris}} per [[{{ns:User}}:$3|$3]].",
'ea-selectrecipients' => 'Seleccionar los destinataris',
'ea-compose' => 'Compausar lo contengut',
'ea-selectlist' => "Destinataris suplementaris coma los títols d'articles o las adreças de corrièr electronic
* ''separar los articles amb , : * \\n''
* ''la tièra pòt conténer de modèls e de foncions parsaires''",
'ea-show' => 'Visionar los destinataris',
'ea-send' => 'Mandar !',
'ea-subject' => 'Entrar una linha « objècte » pel corrièr electronic',
'ea-message' => 'Apondre lo contengut al començament amb un messatge facultatiu (tèxt wiki)',
'ea-selectcss' => "Seleccionar un fuèlh d'estil CSS",
);
/** Polish (Polski)
* @author Maikking
* @author Sp5uhe
*/
$messages['pl'] = array(
'ea-desc' => 'Wyślij stronę HTML na adres e-mail lub grupę adresów za pomocą [http://phpmailer.sourceforge.net phpmailer].',
'ea-heading' => '=== Wysłanie na e-mail strony [[$1]] ===',
'ea-fromgroup' => 'Z grupy:',
'ea-pagesend' => 'Strona "$1" wysłana z $2',
'ea-nopage' => 'Wybierz stronę do wysłania, przykładowo [[Special:EmailPage/{{MediaWiki:Mainpage-url}}]].',
'ea-norecipients' => 'Nie znaleziono prawidłowego adresu e-mail.',
'ea-listrecipients' => '=== {{PLURAL:$1|Odbiorca|$1 odbiorców}} ===',
'ea-error' => "'''Błąd podczas wysyłania [[$1]]:''' ''$2''",
'ea-denied' => 'Odmowa dostępu',
'ea-sent' => "Strona [[$1]] została wysłana do '''$2''' {{PLURAL:$2|odbiorcy|odbiorców}} przez [[{{ns:User}}:$3|$3]].",
'ea-selectrecipients' => 'Wybierz odbiorców',
'ea-compose' => 'Tworzenie zawartości',
'ea-selectlist' => "Dodatkowi odbiorcy:
*''oddziel obiekty za pomocą , ; * \\n''
*''lista może zawierać szablony i funkcje parsera''",
'ea-show' => 'Pokaż odbiorców',
'ea-send' => 'Wyślij',
'ea-subject' => 'Wprowadź temat wiadomości e-mail',
'ea-message' => 'Dołączanie zawartości z dodatkową informacją.',
'ea-selectcss' => 'Wybierz styl CSS',
);
/** Romanian (Română)
* @author KlaudiuMihaila
*/
$messages['ro'] = array(
'ea-fromgroup' => 'Din grupul:',
'ea-send' => 'Trimite!',
);
/** Slovak (Slovenčina)
* @author Helix84
*/
$messages['sk'] = array(
'emailpage' => 'Poslať stránku emailom',
'ea-desc' => 'Poslať stránku vo formáte HTML na emailovú adresu alebo zoznam adries pomocou [http://phpmailer.sourceforge.net phpmailer].',
'ea-heading' => '=== Poslanie stránky [[$1]] emailom ===',
'ea-fromgroup' => 'Zo skupiny:',
'ea-pagesend' => 'Článok „$1” poslaný z $2',
'ea-nopage' => 'Prosím, uveďte stránku, ktorú chcete poslať, napr. [[Special:EmailPage/Hlavná stránka]].',
'ea-norecipients' => 'Nebola nájdená platná emailová adresa!',
'ea-listrecipients' => '=== Zoznam $1 {{PLURAL:$1|príjemcu|príjemcov}} ===',
'ea-error' => "'''Chyba pri odosielaní [[$1]]:''' ''$2''",
'ea-denied' => 'Nemáte potrebné oprávnenie',
'ea-sent' => "[[{{ns:User}}:$3|$3]] úspešne poslal stránku [[$1]] '''$2''' {{PLURAL:$2|používateľovi|používateľom}}.",
'ea-selectrecipients' => 'Vybrať príjemcov',
'ea-compose' => 'Napísať obsah správy',
'ea-selectlist' => "Ďalší príjemci vo forme názvov stránok alebo emailových adries
*''položky oddeľujte pomocu , ; * \\n
*''zoznam môže obsahovať šablóny a funkcie syntaktického analyzátora''",
'ea-show' => 'Zobraziť príjemcov',
'ea-send' => 'Poslať!',
'ea-subject' => 'Zadajte predmet emailu',
'ea-message' => 'Pred obsah pridať (nepovinne) správu (wikitext)',
'ea-selectcss' => 'Vyberte CSS štýl',
);
/** Sundanese (Basa Sunda)
* @author Irwangatot
*/
$messages['su'] = array(
'ea-send' => 'Kintun!',
);
/** Swedish (Svenska)
* @author <NAME>
* @author M.M.S.
*/
$messages['sv'] = array(
'emailpage' => 'E-posta sida',
'ea-desc' => 'Skicka en renderad HTML-sida till en e-postadress eller en lista över adresser som använder [http://phpmailer.sourceforge.net phpmailer].',
'ea-heading' => '=== E-posta sidan [[$1]] ===',
'ea-fromgroup' => 'Från grupp:',
'ea-pagesend' => 'Artikeln "$1" skickades från $2',
'ea-nopage' => 'Var god ange en sida att skicka, för exempel [[Special:EmailPage/{{MediaWiki:Mainpage-url}}]].',
'ea-norecipients' => 'Inga giltiga e-postadresser hittades!',
'ea-listrecipients' => '=== Lista över $1 {{PLURAL:$1|mottagare|mottagare}} ===',
'ea-error' => "'''Fel under sändande av [[$1]]:''' ''$2''",
'ea-denied' => 'Åtkomst nekas',
'ea-sent' => "Sidan [[$1]] har skickats till '''$2''' {{PLURAL:$2|mottagare|mottagare}} av [[{{ns:User}}:$3|$3]].",
'ea-selectrecipients' => 'Ange mottagare',
'ea-compose' => 'Komponera innehåll',
'ea-selectlist' => "Ytterligare mottagare som sidtitlar eller e-postadresser
*''separera element med, ; * \\n
*''listor kan innehålla mallar och parser-funktioner''",
'ea-show' => 'Visa mottagare',
'ea-send' => 'Skicka!',
'ea-subject' => 'Ange ett ämne för e-brevet',
'ea-message' => 'Fyll innehållet med ett valfritt meddelande (wikitext)',
'ea-selectcss' => 'Ange en CSS-stilmall',
);
/** Telugu (తెలుగు)
* @author Veeven
* @author వైజాసత్య
*/
$messages['te'] = array(
'ea-denied' => 'అనుమతిని నిరాకరించాం',
'ea-send' => 'పంపించు!',
);
/** Vietnamese (Tiếng Việt)
* @author <NAME>
* @author Vinhtantran
*/
$messages['vi'] = array(
'emailpage' => 'Trang thư điện tử',
'ea-desc' => 'Gửi trang HTML giản lược đến một địa chỉ hoặc danh sách các địa chỉ thư điện tử dùng [http://phpmailer.sourceforge.net phpmailer].',
'ea-heading' => '=== Gửi trang [[$1]] ===',
'ea-nopage' => 'Xin hãy xác định trang muốn gửi, ví dụ [[Special:EmailPage/{{MediaWiki:Mainpage-url}}]].',
'ea-norecipients' => 'Không tìm thấy địa chỉ thư điện tử hợp lệ!',
'ea-listrecipients' => '=== Danh sách $1 {{PLURAL:$1|người nhận|người nhận}} ===',
'ea-error' => "'''Lỗi khi gửi [[$1]]:''' ''$2''",
'ea-sent' => "Trang [[$1]] đã được [[{{ns:User}}:$3|$3]] gửi thành công đến '''$2''' {{PLURAL:$2|người nhận|người nhận}}.",
'ea-selectrecipients' => 'Chọn người nhận',
'ea-compose' => 'Soạn nội dung',
'ea-selectlist' => "Những người nhận khác theo tựa đề trang hoặc địa chỉ thư điện tử
*''phân cách các mục bằng , ; * \\n
*''danh sách có thể chứa tiêu bản và hàm cú pháp''",
'ea-show' => 'Hiển thị người nhận',
'ea-send' => 'Gửi!',
'ea-subject' => 'Nhập vào dòng tiêu đề cho thư điện tử',
'ea-message' => 'Gắn nội dung với thông điệp tùy chọn (văn bản wiki)',
'ea-selectcss' => 'Lựa chọn một kiểu trình bày CSS',
);
<file_sep><?php
/**
* PdfBook extension
* - Composes a book from articles in a category and exports as a PDF book
*
* See http://www.mediawiki.org/Extension:PdfBook for installation and usage details
 * See http://www.organicdesign.co.nz/Extension_talk:PdfBook for development notes and discussion
*
* Started: 2007-08-08
*
* @file
* @ingroup Extensions
* @author <NAME> [http://www.organicdesign.co.nz/nad User:Nad]
* @copyright © 2007 <NAME>
* @licence GNU General Public Licence 2.0 or later
*/
// Abort if this file is accessed directly rather than being loaded by MediaWiki
if( !defined( 'MEDIAWIKI' ) ) die( "Not an entry point." );

define( 'PDFBOOK_VERSION', "1.2.3, 2013-09-19" );

$dir = dirname( __FILE__ );

// Register the hooks class for autoloading and this extension's message file
$wgAutoloadClasses['PdfBookHooks'] = $dir . '/PdfBook.hooks.php';
$wgExtensionMessagesFiles['PdfBook'] = $dir . '/PdfBook.i18n.php';

// Extension credits shown on Special:Version
$wgExtensionCredits['parserhook'][] = array(
	'path' => __FILE__,
	'name' => "PdfBook",
	'author' => "[http://www.organicdesign.co.nz/nad <NAME>]",
	'url' => "http://www.mediawiki.org/wiki/Extension:PdfBook",
	'version' => PDFBOOK_VERSION,
	'descriptionmsg' => 'pdfbook-desc',
);

// Whether or not an action tab is wanted for printing to PDF
$wgPdfBookTab = false;

// Whether the files should be downloaded or viewed in-browser
$wgPdfBookDownload = true;

// The actual export is handled via the unknown-action hook
$wgHooks['UnknownAction'][] = 'PdfBookHooks::onUnknownAction';

// Hooks for pre-Vector and Vector addtabs.
$wgHooks['SkinTemplateTabs'][] = 'PdfBookHooks::onSkinTemplateTabs';
$wgHooks['SkinTemplateNavigation'][] = 'PdfBookHooks::onSkinTemplateNavigation';

// Add a new pdf log type
$wgLogTypes[] = 'pdf';
$wgLogNames ['pdf'] = 'pdflogpage';
$wgLogHeaders['pdf'] = 'pdflogpagetext';
$wgLogActions['pdf/book'] = 'pdflogentry';
<file_sep><?php
/**
* AjaxComments extension - Add comments to the end of the page that can be edited, deleted or replied to instead of using the talk pages
*
* @file
* @ingroup Extensions
* @author <NAME> [http://www.organicdesign.co.nz/nad User:Nad]
* @copyright © 2012 <NAME>
* @licence GNU General Public Licence 2.0 or later
*/
// Abort if this file is accessed directly rather than being loaded by MediaWiki
if( !defined( 'MEDIAWIKI' ) ) die( 'Not an entry point.' );

define( 'AJAXCOMMENTS_VERSION', '1.2.2, 2014-12-8' );

// Field indexes within each stored comment record (see AjaxComments::add/reply/like)
define( 'AJAXCOMMENTS_USER', 1 );    // author's user name
define( 'AJAXCOMMENTS_DATE', 2 );    // unix timestamp when the comment was posted
define( 'AJAXCOMMENTS_TEXT', 3 );    // the comment wikitext
define( 'AJAXCOMMENTS_PARENT', 4 );  // id of the parent comment, or false for a top-level comment
define( 'AJAXCOMMENTS_REPLIES', 5 ); // list of ids of direct replies to this comment
define( 'AJAXCOMMENTS_LIKE', 6 );    // map of user name => like(+)/dislike(-) score

$wgAjaxCommentsLikeDislike = true; // add a like/dislike link to each comment
$wgAjaxCommentsAvatars = true; // use the gravatar service for users icons
$wgAjaxCommentsPollServer = 0; // poll the server to see if any changes to comments have been made and update if so

// Instantiate the extension after all configuration has been loaded
$wgExtensionFunctions[] = 'wfSetupAjaxComments';

// Extension credits shown on Special:Version
$wgExtensionCredits['other'][] = array(
	'path' => __FILE__,
	'name' => 'AjaxComments',
	'author' => '[http://www.organicdesign.co.nz/User:Nad <NAME>]',
	'url' => 'http://www.mediawiki.org/wiki/Extension:AjaxComments',
	'description' => 'Add comments to the end of the page that can be edited, deleted or replied to instead of using the talk pages',
	'version' => AJAXCOMMENTS_VERSION
);

$wgExtensionMessagesFiles['AjaxComments'] = __DIR__ . '/AjaxComments.i18n.php';
class AjaxComments {

	// Comment data for the current page, keyed by unique id; each entry is an
	// array indexed by the AJAXCOMMENTS_* constants defined at the top of this file
	var $comments = array();

	// Set to true once $this->comments has been modified and needs writing back to the talk page
	var $changed = false;

	// Title object of the talk page used as the comment store
	var $talk = false;

	// Whether the current user may add/edit/reply (default: logged in; overridable via hook)
	var $canComment = false;

	/**
	 * Register hooks and resource modules, and redirect AjaxComments-enabled
	 * talk pages to the comments anchor on their subject page.
	 */
	function __construct() {
		global $wgHooks, $wgOut, $wgResourceModules, $wgAjaxCommentsPollServer, $wgTitle, $wgExtensionAssetsPath, $wgUser;
		$wgHooks['UnknownAction'][] = $this;

		// Create a hook to allow external condition for whether there should be comments shown
		$title = array_key_exists( 'title', $_GET ) ? Title::newFromText( $_GET['title'] ) : false;
		if( !array_key_exists( 'action', $_REQUEST ) && self::checkTitle( $title ) ) $wgHooks['BeforePageDisplay'][] = $this; else $wgAjaxCommentsPollServer = -1;

		// Create a hook to allow external condition for whether comments can be added or replied to (default is just user logged in)
		$this->canComment = $wgUser->isLoggedIn();
		wfRunHooks( 'AjaxCommentsCheckWritable', array( $title, &$this->canComment ) );

		// Redirect talk pages with AjaxComments to the comments
		// (odd namespace numbers are talk namespaces, hence the &1 test)
		if( is_object( $title ) && $title->getNamespace() > 0 && ($title->getNamespace()&1) ) {
			$title = Title::newFromText( $title->getText(), $title->getNamespace() - 1 );
			$ret = true;
			wfRunHooks( 'AjaxCommentsCheckTitle', array( $title, &$ret ) );
			if( $ret ) {
				// Cancel normal page output and issue an HTTP redirect to the subject page's anchor
				$wgOut->disable();
				wfResetOutputBuffers();
				$url = $title->getLocalUrl();
				header( "Location: $url#ajaxcomments" );
				exit;
			}
		}

		// Set up JavaScript and CSS resources
		$wgResourceModules['ext.ajaxcomments'] = array(
			'scripts' => array( 'ajaxcomments.js' ),
			'styles' => array( 'ajaxcomments.css' ),
			'dependencies' => array( 'jquery.ui.dialog' ),
			'localBasePath' => __DIR__,
			'remoteBasePath' => $wgExtensionAssetsPath . '/' . basename( __DIR__ ),
			'messages' => array(
				'ajaxcomments-confirmdel',
				'ajaxcomments-confirm',
				'ajaxcomments-yes',
				'ajaxcomments-post',
				'ajaxcomments-cancel'
			),
		);
		$wgOut->addModules( 'ext.ajaxcomments' );

		// Set polling to -1 if checkTitle says comments are disabled
		$wgOut->addJsConfigVars( 'wgAjaxCommentsPollServer', $wgAjaxCommentsPollServer );
	}

	/**
	 * Allow other extensions to check if a title has comments
	 * - only existing, non-redirect pages in subject (even-numbered) namespaces qualify;
	 *   the AjaxCommentsCheckTitle hook gets the final say
	 */
	public static function checkTitle( $title ) {
		$ret = true;
		if( !is_object( $title ) ) $title = Title::newFromText( $title );
		if( !is_object( $title ) || $title->getArticleID() == 0 || $title->isRedirect() || ($title->getNamespace()&1) ) $ret = false;
		else wfRunHooks( 'AjaxCommentsCheckTitle', array( $title, &$ret ) );
		return $ret;
	}

	/**
	 * Render a name at the end of the page so redirected talk pages can go there before ajax loads the content
	 */
	function onBeforePageDisplay( $out, $skin ) {
		$out->addHtml( "<a id=\"ajaxcomments-name\" name=\"ajaxcomments\"></a>" );
		return true;
	}

	/**
	 * Process the Ajax requests
	 * - we're bypassing the Ajax handler because we need the title and parser to be established
	 * - if "ajaxcommentsinternal" action is passed, all comments are returned directly as html
	 */
	function onUnknownAction( $action, $article ) {
		if( $action == 'ajaxcomments' || $action == 'ajaxcommentsinternal' ) {
			global $wgOut, $wgRequest;

			// External requests bypass normal page output; internal ones render read-only
			if( $action == 'ajaxcomments' ) $wgOut->disable(); else $this->canComment = false;
			$talk = $article->getTitle()->getTalkPage();
			if( is_object( $talk ) ) {

				// Request parameters only apply to the external (ajax) action
				if( $action == 'ajaxcomments' ) {
					$id = $wgRequest->getText( 'id', false );
					$text = $wgRequest->getText( 'text', false );
					$ts = $wgRequest->getText( 'ts', '' );
					$command = $wgRequest->getText( 'cmd' )
;				} else $id = $text = $ts = $command = '';
				$this->talk = $talk;
				$article = new Article( $talk );
				$summary = wfMessage( "ajaxcomments-$command-summary" )->text();

				// If the talk page exists, get its content and the timestamp of the latest revision
				$content = '';
				if( $talk->exists() ) {
					$content = $article->getContent();
					$this->comments = self::textToData( $content );
					$latest = Revision::newFromTitle( $talk )->getTimestamp();
				} else $latest = 0;

				// If a timestamp is provided in the request, bail if nothing has happened to the talk content since that time
				if( is_numeric( $ts ) && ( $ts == $latest || $latest == 0 ) ) return true;

				// Perform the command on the talk content
				switch( $command ) {

					case 'add':
						print $this->add( $text );
					break;

					case 'reply':
						print $this->reply( $id, $text );
					break;

					case 'edit':
						print $this->edit( $id, $text );
					break;

					case 'del':
						print $this->delete( $id );
						print count( $this->comments ) > 0 ? '' : "<i id=\"ajaxcomments-none\">" . wfMessage( 'ajaxcomments-none' )->text() . "</i>";
					break;

					case 'like':
						if( $summary = $this->like( $id, $text ) ) {
							print $this->renderComment( $id, true );
						}
					break;

					// Return the raw source of a single comment as JSON (for the edit dialog)
					case 'src':
						header( 'Content-Type: application/json' );
						$comment = $this->comments[$id];
						print '{';
						print '"user":' . json_encode( $comment[AJAXCOMMENTS_USER] );
						print ',"date":' . json_encode( $comment[AJAXCOMMENTS_DATE] );
						print ',"text":' . json_encode( $comment[AJAXCOMMENTS_TEXT] );
						print '}';
					break;

					// By default return the whole rendered comments area
					default:
						$content = '';
						$n = count( $this->comments );
						if( $action == 'ajaxcomments' ) {
							$tsdiv = "<div id=\"ajaxcomment-timestamp\" style=\"display:none\">$latest</div>";
							$content .= "<h2>" . wfMessage( 'ajaxcomments-heading' )->text() . "</h2><a name=\"ajaxcomments\"></a>$tsdiv\n";
						}
						$cc = "<h3 id=\"ajaxcomments-count\">";
						if( $n == 1 ) $content .= $cc . wfMessage( 'ajaxcomments-comment', $n )->text() . "</h3>\n";
						else if( $n > 1 ) $content .= $cc . wfMessage( 'ajaxcomments-comments', $n )->text() . "</h3>\n";
						$content .= $this->renderComments();
						if( $action == 'ajaxcomments' ) print $content; else return $content;
				}

				// If any comment data has been changed write it back to the talk article
				if( $this->changed ) {
					$flag = $talk->exists() ? EDIT_UPDATE : EDIT_NEW;
					$article->doEdit( self::dataToText( $this->comments, $content ), $summary, $flag );
				}
			}
		}
		return true;
	}

	/**
	 * Add a new comment to the data structure
	 * - returns the rendered HTML of the new comment
	 */
	function add( $text ) {
		global $wgUser;
		$id = uniqid();
		$this->comments[$id] = array(
			AJAXCOMMENTS_PARENT => false,
			AJAXCOMMENTS_USER => $wgUser->getName(),
			AJAXCOMMENTS_DATE => time(),
			AJAXCOMMENTS_TEXT => $text,
			AJAXCOMMENTS_REPLIES => array()
		);
		$this->changed = true;
		return $this->renderComment( $id );
	}

	/**
	 * Edit an existing comment in the data structure
	 * - returns the re-rendered comment text only (not the whole comment frame)
	 */
	function edit( $id, $text ) {
		global $wgParser;
		$this->comments[$id][AJAXCOMMENTS_TEXT] = $text;
		$html = $wgParser->parse( $text, $this->talk, new ParserOptions(), true, true )->getText();
		$this->changed = true;
		return "<div class=\"ajaxcomment-text\">$html</div>";
	}

	/**
	 * Add a new comment as a reply to an existing comment in the data structure
	 * - the new reply is prepended to the parent's reply list
	 */
	function reply( $parent, $text ) {
		global $wgUser;
		$id = uniqid();
		array_unshift( $this->comments[$parent][AJAXCOMMENTS_REPLIES], $id );
		$this->comments[$id] = array(
			AJAXCOMMENTS_PARENT => $parent,
			AJAXCOMMENTS_USER => $wgUser->getName(),
			AJAXCOMMENTS_DATE => time(),
			AJAXCOMMENTS_TEXT => $text,
			AJAXCOMMENTS_REPLIES => array()
		);
		$this->changed = true;
		return $this->renderComment( $id );
	}

	/**
	 * Delete a comment and all its replies from the data structure
	 */
	function delete( $id ) {
		if( array_key_exists( $id, $this->comments ) ) {

			// Call delete for all the replies of this comment (recursive)
			foreach( $this->comments[$id][AJAXCOMMENTS_REPLIES] as $child ) $this->delete( $child );

			// Remove this item from the parent's replies list (unless root level)
			if( $parent = $this->comments[$id][AJAXCOMMENTS_PARENT] ) {
				$i = array_search( $id, $this->comments[$parent][AJAXCOMMENTS_REPLIES] );
				if( $i !== false ) unset( $this->comments[$parent][AJAXCOMMENTS_REPLIES][$i] );
			}

			// Remove this comment from the data
			unset( $this->comments[$id] );

			// If there are no comments now, delete the page
			if( count( $this->comments ) == 0 ) {
				$article = new Article( $this->talk );
				$article->doDelete( wfMessage( 'ajaxcomments-talkdeleted' )->text() );
			}

			// Otherwise mark the data as changed so the talk page gets updated
			else $this->changed = true;
		}
	}

	/**
	 * Like/unlike a comment returning a message describing the change
	 * - if val isn't passed, then the current like state of the current user and the total likes/dislikes are returned
	 */
	function like( $id, $val = false ) {
		global $wgUser;
		$name = $wgUser->getName();
		$cname = $this->comments[$id][AJAXCOMMENTS_USER];

		// Lazily create the like map (older comments may predate the like feature)
		if( !array_key_exists( AJAXCOMMENTS_LIKE, $this->comments[$id] ) ) $this->comments[$id][AJAXCOMMENTS_LIKE] = array();
		$like = array_key_exists( $name, $this->comments[$id][AJAXCOMMENTS_LIKE] ) ? $this->comments[$id][AJAXCOMMENTS_LIKE][$name] : 0;

		// If a +1/-1 value was passed, update the like state now, returning a description message
		if( $val ) {
			$this->changed = true;

			// Remove the user if they now no longer like or dislike, otherwise update their value
			if( $like + $val == 0 ) unset( $this->comments[$id][AJAXCOMMENTS_LIKE][$name] );
			else $this->comments[$id][AJAXCOMMENTS_LIKE][$name] = $like + $val;
			if( $val > 0 ) {
				if( $like < 0 ) return wfMessage( 'ajaxcomments-undislike', $name, $cname )->text();
				else return wfMessage( 'ajaxcomments-like', $name, $cname )->text();
			}
			else {
				if( $like > 0 ) return wfMessage( 'ajaxcomments-unlike', $name, $cname )->text();
				else return wfMessage( 'ajaxcomments-dislike', $name, $cname )->text();
			}
		}

		// No value was passed, add up the likes and dislikes
		$likes = $dislikes = array();
		foreach( $this->comments[$id][AJAXCOMMENTS_LIKE] as $k => $v ) if( $v > 0 ) $likes[] = $k; else $dislikes[] = $k;
		return array( $like, $likes, $dislikes );
	}

	/**
	 * Render the comment data structure as HTML
	 * - also render a no comments message if none
	 * - and an add comments link at the top
	 */
	function renderComments() {
		global $wgUser;
		$html = '';

		// Only top-level comments are rendered directly; replies are rendered recursively.
		// Prepending each one reverses the order, so newest comments appear first.
		foreach( $this->comments as $id => $comment ) {
			if( $comment[AJAXCOMMENTS_PARENT] === false ) $html = $this->renderComment( $id ) . $html;
		}
		if( $html == '' ) $html = "<i id=\"ajaxcomments-none\">" . wfMessage( 'ajaxcomments-none' )->text() . "</i><br />";

		// If logged in, allow replies and editing etc
		if( $this->canComment ) {
			$html = "<ul class=\"ajaxcomment-links\">" .
				"<li id=\"ajaxcomment-add\"><a href=\"javascript:ajaxcomment_add()\">" . wfMessage( 'ajaxcomments-add' )->text() . "</a></li>\n" .
				"</ul>\n$html";
		} else $html = "<i id=\"ajaxcomments-none\">" . wfMessage( 'ajaxcomments-anon' )->text() . "</i><br />$html";
		return $html;
	}

	/**
	 * Render a single comment and any of its replies
	 * - this is recursive - it will render any replies which could in turn contain replies etc
	 * - renders edit/delete link if sysop, or no replies and current user is owner
	 * - if likeonly is set, return only the like/dislike links
	 */
	function renderComment( $id, $likeonly = false ) {
		global $wgParser, $wgUser, $wgLang, $wgAjaxCommentsAvatars, $wgAjaxCommentsLikeDislike;
		$curName = $wgUser->getName();
		$c = $this->comments[$id];
		$html = '';

		// Render replies
		$r = '';
		foreach( $c[AJAXCOMMENTS_REPLIES] as $child ) $r .= $this->renderComment( $child );

		// Get total likes and unlikes
		$likelink = $dislikelink = '';
		list( $like, $likes, $dislikes ) = $this->like( $id );

		// Render user name as link
		$name = $c[AJAXCOMMENTS_USER];
		$user = User::newFromName( $name );
		$url = $user->getUserPage()->getLocalUrl();
		$ulink = "<a href=\"$url\">$name</a>";

		// Get the user's gravatar url (only for users with a confirmed email address)
		if( $wgAjaxCommentsAvatars && $user->isEmailConfirmed() ) {
			$email = $user->getEmail();
			$grav = "http://www.gravatar.com/avatar/" . md5( strtolower( $email ) ) . "?s=50&d=wavatar";
			$grav = "<img src=\"$grav\" alt=\"$name\" />";
		} else $grav = '';

		// Frame: signature line, avatar, and the parsed comment wikitext
		if( !$likeonly ) $html .= "<div class=\"ajaxcomment\" id=\"ajaxcomments-$id\">\n" .
			"<div class=\"ajaxcomment-sig\">" .
			wfMessage( 'ajaxcomments-sig', $ulink, $wgLang->timeanddate( $c[AJAXCOMMENTS_DATE], true ) )->text() .
			"</div>\n<div class=\"ajaxcomment-icon\">$grav</div><div class=\"ajaxcomment-text\">" .
			$wgParser->parse( $c[AJAXCOMMENTS_TEXT], $this->talk, new ParserOptions(), true, true )->getText() .
			"</div>\n<ul class=\"ajaxcomment-links\">";

		// If logged in, allow replies and editing etc
		if( $this->canComment ) {
			if( !$likeonly ) {

				// Reply link
				$html .= "<li id=\"ajaxcomment-reply\"><a href=\"javascript:ajaxcomment_reply('$id')\">" . wfMessage( 'ajaxcomments-reply' )->text() . "</a></li>\n";

				// If sysop, or no replies and current user is owner, add edit/del links
				if( in_array( 'sysop', $wgUser->getEffectiveGroups() ) || ( $curName == $c[AJAXCOMMENTS_USER] && $r == '' ) ) {
					$html .= "<li id=\"ajaxcomment-edit\"><a href=\"javascript:ajaxcomment_edit('$id')\">" . wfMessage( 'ajaxcomments-edit' )->text() . "</a></li>\n";
					$html .= "<li id=\"ajaxcomment-del\"><a href=\"javascript:ajaxcomment_del('$id')\">" . wfMessage( 'ajaxcomments-del' )->text() . "</a></li>\n";
				}
			}

			// Make the like/dislike links (users cannot like/dislike their own comments)
			if( $wgAjaxCommentsLikeDislike ) {
				if( $curName != $name ) {
					if( $like <= 0 ) $likelink = " onclick=\"javascript:ajaxcomment_like('$id',1)\" class=\"ajaxcomment-active\"";
					if( $like >= 0 ) $dislikelink = " onclick=\"javascript:ajaxcomment_like('$id',-1)\" class=\"ajaxcomment-active\"";
				}

				// Add the likes and dislikes links
				$clikes = count( $likes );
				$cdislikes = count( $dislikes );
				$likes = $this->formatNameList( $likes, 'like' );
				$dislikes = $this->formatNameList( $dislikes, 'dislike' );
				$html .= "<li title=\"$likes\" id=\"ajaxcomment-like\"$likelink>$clikes</li>\n";
				$html .= "<li title=\"$dislikes\" id=\"ajaxcomment-dislike\"$dislikelink>$cdislikes</li>\n";
			}
		}
		if( !$likeonly ) $html .= "</ul>$r</div>\n";
		return $html;
	}

	/**
	 * Return the passed list of names as a list of "a,b,c and d"
	 * - $msg selects the message family ('like' or 'dislike')
	 */
	function formatNameList( $list, $msg ) {
		$len = count( $list );
		if( $len < 1 ) return wfMessage( "ajaxcomments-no$msg" )->text();
		if( $len == 1 ) return wfMessage( "ajaxcomments-one$msg", $list[0] )->text();
		$last = array_pop( $list );
		return wfMessage( "ajaxcomments-many$msg", join( ', ', $list ), $last )->text();
	}

	/**
	 * Return the passed talk text as a data structure of comments
	 * - detect if the content needs to be base64 decoded before unserialising
	 *   (a trailing '}' indicates a raw serialized string; anything else is assumed base64)
	 */
	static function textToData( $text ) {
		if( preg_match( "|== AjaxComments:DataStart ==\s*(.+?)\s*== AjaxComments:DataEnd ==|s", $text, $m ) ) {
			$data = $m[1];
			if( substr( $data, -1 ) != '}' ) $data = base64_decode( $data );
			return unserialize( $data );
		}
		return array();
	}

	/**
	 * Return the passed data structure of comments as text for a talk page
	 * - $content is the current talk page text to integrate with
	 * - replaces the existing data section if present, otherwise appends one
	 */
	static function dataToText( $data, $content ) {
		$text = base64_encode( serialize( $data ) );
		$text = "\n== AjaxComments:DataStart ==\n$text\n== AjaxComments:DataEnd ==";
		$content = preg_replace( "|\s*== AjaxComments:DataStart ==\s*(.+)\s*== AjaxComments:DataEnd ==|s", $text, $content, 1, $count );
		if( $count == 0 ) $content .= $text;
		return $content;
	}
}
// $wgAjaxComments can be set to false prior to extension setup to disable comments on this page
/**
 * Extension setup hook: instantiate the AjaxComments singleton.
 * Replaces the boolean config flag with the live extension object.
 */
function wfSetupAjaxComments() {
	global $wgAjaxComments;
	$wgAjaxComments = new AjaxComments();
}
<file_sep><?php
/**
* Internationalisation for Bliki extension
*
* @author <NAME>
* @file
* @ingroup Extensions
*/
$messages = array();
/** English
 * @author Dunkley
 * Note: $1 and $2 are substituted message parameters — see the callers
 * of these messages for what is passed in.
 */
$messages['en'] = array(
	'blikifeed' => "Bliki feed",
	'bliki-desc' => "Use this feed to track the most recent $1 at $2.",
);
<file_sep><?php
/**
* Internationalisation file for the TreeAndMenu extension.
*
* @file
* @ingroup Extensions
*/
$messages = array();
/** English */
$messages['en'] = array(
'treeandmenu-desc' => "Adds <code><nowiki>#tree</nowiki></code> and <code><nowiki>#menu</nowiki></code> parser functions which contain bullet-lists to be rendered as collapsible treeviews or dropdown menus.
The treeviews use the [http://www.destroydrop.com/javascripts/tree dTree] JavaScript tree menu, and the dropdown menus use [http://www.htmldog.com/articles/suckerfish/dropdowns/ Son of Suckerfish]",
);
/** Message documentation (Message documentation)
* @author Purodha
*/
$messages['qqq'] = array(
'treeandmenu-desc' => '{{desc}}',
);
/** Asturian (asturianu)
* @author Xuacu
*/
$messages['ast'] = array(
'treeandmenu-desc' => "Amiesta les funciones d'analís <code><nowiki>#tree</nowiki></code> y <code><nowiki>#menu</nowiki></code>, que contienen llistes con asteriscos qu'apaecerán como vistes d'árbol contraíbles o menús estenderexables.
Les vistes d'árbol usen el menú d'árbol JavaScript [http://www.destroydrop.com/javascripts/tree dTree], y los menús estenderexables usen [http://www.htmldog.com/articles/suckerfish/dropdowns/ Son of Suckerfish]",
);
/** Belarusian (Taraškievica orthography) (беларуская (тарашкевіца))
* @author EugeneZelenko
* @author Renessaince
*/
$messages['be-tarask'] = array(
'treeandmenu-desc' => 'Дадае функцыі парсэру <code><nowiki>#tree</nowiki></code> і <code><nowiki>#menu</nowiki></code>, якія зьмяшчаюць маркіраваныя сьпісы, што могуць выводзіцца як разгортваемыя дрэвы або выпадаючыя мэню.
Разгортваемыя дрэвы выкарыстоўваюць JavaScript [http://www.destroydrop.com/javascripts/tree dTree], а выпадаючыя мэню — [http://www.htmldog.com/articles/suckerfish/dropdowns/ Son of Suckerfish]',
);
/** German (Deutsch)
* @author Kghbln
*/
$messages['de'] = array(
'treeandmenu-desc' => 'Fügt die Parserfunktionen <code><nowiki>#tree</nowiki></code> und <code><nowiki>#menu</nowiki></code> hinzu, die Aufzählungslisten in Form einer Baumansicht ein- und ausklappbar machen oder als Aufklappmenü darstellen können.
Die Baumansicht nutzt hierzu [http://www.destroydrop.com/javascripts/tree dTree] sowie das Aufklappmenü [http://www.htmldog.com/articles/suckerfish/dropdowns/ Son of Suckerfish]',
);
/** Lower Sorbian (dolnoserbski)
* @author Michawiki
*/
$messages['dsb'] = array(
'treeandmenu-desc' => 'Pśidawa parserowe funkcije <code><nowiki>#tree</nowiki></code> a <code><nowiki>#menu</nowiki></code>, kótarež zwobraznjaju nalicenja ako złožujobne bomowe naglědy abo padajuce menije.
Bomowe naglědy wužywaju JavaScriptowy bomowy meni [http://www.destroydrop.com/javascripts/tree dTree] a padajuce menije wužywaju [http://www.htmldog.com/articles/suckerfish/dropdowns/ Son of Suckerfish]',
);
/** Spanish (español)
* @author Armando-Martin
*/
$messages['es'] = array(
'treeandmenu-desc' => 'Añade las funciones del analizador (parser) <code><nowiki>#tree</nowiki></code> y <code><nowiki>#menu</nowiki></code>, que contienen listas con viñetass que se renderizarán como árboles contraíbles o menús desplegables.
Los árboles emplean un menú de [http://www.destroydrop.com/javascripts/árbol dTree] de JavaScript y los menús desplegables utilizan [http://www.htmldog.com/articles/suckerfish/dropdowns/ Son of Suckerfish]',
);
/** French (français)
* @author Gomoko
*/
$messages['fr'] = array(
'treeandmenu-desc' => "Ajoute les fonctions d'analyse <code><nowiki>#tree</nowiki></code> et <code><nowiki>#menu</nowiki></code> qui permettent aux listes à puces d'être rendues comme des arbres pliables ou des menus déroulants.
Les arbres pliables utilisent le menu par arbre JavaScript [http://www.destroydrop.com/javascripts/tree dTree], et les menus déroulants utilisent [http://www.htmldog.com/articles/suckerfish/dropdowns/ Son of Suckerfish]",
);
/** Galician (galego)
* @author Toliño
*/
$messages['gl'] = array(
'treeandmenu-desc' => 'Endade as funcións analíticas <code><nowiki>#tree</nowiki></code> e <code><nowiki>#menu</nowiki></code>, que conteñen listas con asteriscos que se renderizarán como árbores contraíbles ou menús despregables.
As árbores empregan o menú de árbore [http://www.destroydrop.com/javascripts/tree dTree] do JavaScript e os menús despregables usan [http://www.htmldog.com/articles/suckerfish/dropdowns/ Son of Suckerfish]',
);
/** Hebrew (עברית)
* @author Amire80
*/
$messages['he'] = array(
'treeandmenu-desc' => 'הוספת פונקציות המפענח <code dir="ltr"><nowiki>#tree</nowiki></code> ו־<code dir="ltr"><nowiki>#menu</nowiki></code> שמכילות רשימות תבליטים שיוצגו כעצים שאפשר לקפל או תפריטים נפתחים.
תצוגת העץ משתמשת בתפריט [http://www.destroydrop.com/javascripts/tree dTree] והפתריטים המפתחים משתמשים ב־[http://www.htmldog.com/articles/suckerfish/dropdowns/ Son of Suckerfish]',
);
/** Upper Sorbian (hornjoserbsce)
* @author Michawiki
*/
$messages['hsb'] = array(
'treeandmenu-desc' => 'Přidawa parserowe funkcije <code><nowiki>#tree</nowiki></code> a <code><nowiki>#menu</nowiki></code>, kotrež naličenja jako fałdujomne štomowe napohlady abo padace menije zwobraznjeja.
Štomowe napohlady wužiwaja JavaScriptowy štomowy meni [http://www.destroydrop.com/javascripts/tree dTree] a padace menije wužiwaja [http://www.htmldog.com/articles/suckerfish/dropdowns/ Son of Suckerfish]',
);
/** Interlingua (interlingua)
* @author McDutchie
*/
$messages['ia'] = array(
'treeandmenu-desc' => 'Adde le functiones analysator <code><nowiki>#tree</nowiki></code> e <code><nowiki>#menu</nowiki></code> le quales contine listas a punctos a presentar como vistas arborescente plicabile o menus disrolante.
Le vistas arborescente usa le menu JavaScript arborescente [http://www.destroydrop.com/javascripts/tree dTree], e le menus disrolante usa [http://www.htmldog.com/articles/suckerfish/dropdowns/ Son of Suckerfish]',
);
/** Italian (italiano)
* @author Beta16
* @author Ximo17
*/
$messages['it'] = array(
'treeandmenu-desc' => 'Aggiunge le funzioni parser <code><nowiki>#tree</nowiki></code> e <code><nowiki>#menu</nowiki></code> che permettono di visualizzare gli elenchi puntati come menù ad albero o a discesa.
Per i menù ad albero utilizza il menù [http://www.destroydrop.com/javascripts/tree dTree] di JavaScript, mentre per i menù a tendina utilizza [http://www.htmldog.com/articles/suckerfish/dropdowns/ Son of Suckerfish]',
);
/** Korean (한국어)
* @author 아라
*/
$messages['ko'] = array(
'treeandmenu-desc' => '축소 가능한 트리보기 또는 드롭 다운 메뉴로 표시하는 순서 없는 목록을 포함하는 <code><nowiki>#tree</nowiki></code>와 <code><nowiki>#menu</nowiki></code> 파서 함수를 추가.
트리 보기는 [http://www.destroydrop.com/javascripts/tree dTree] 자바 스크립트 메뉴를 사용하고 드롭 다운 메뉴는 [http://www.htmldog.com/articles/suckerfish/dropdowns/ Son of Suckerfish]를 사용합니다.',
);
/** Colognian (Ripoarisch)
* @author Purodha
*/
$messages['ksh'] = array(
'treeandmenu-desc' => 'Deiht de Paaserfunkßjuhne <code lang="en">#tree</code> un <code lang="en">#menu</code> en et Wiki, woh mer Leste met Punkte als op- un zohklappbaa Bäum udder als Ußwahlleste zom erongerträcke aanzeije lohße kann.
De Bäum bruche et <i lang="en">[http://www.destroydrop.com/javascripts/tree dTree]</i>-JavaSkrepp, un de Ußwahlleste zom erongerträcke bruche dä <i lang="en">[http://www.htmldog.com/articles/suckerfish/dropdowns/ Son of Suckerfish]</i>.',
);
/** Macedonian (македонски)
* @author Bjankuloski06
*/
$messages['mk'] = array(
'treeandmenu-desc' => 'Додава парсерски функции <code><nowiki>#tree</nowiki></code> и <code><nowiki>#menu</nowiki></code> што содржат списоци со потточки што ќе се прикажат како расклопни разгранети претстави или паѓачки менија or dropdown menus.
Разгранетите претстави го користат стеблото [http://www.destroydrop.com/javascripts/tree dTree] од JavaScript, а паѓачките менија користат [http://www.htmldog.com/articles/suckerfish/dropdowns/ Son of Suckerfish]',
);
/** Malay (Bahasa Melayu)
* @author Anakmalaysia
*/
$messages['ms'] = array(
'treeandmenu-desc' => 'Membubuh fungsi penghurai <code><nowiki>#tree</nowiki></code> dan <code><nowiki>#menu</nowiki></code> yang mengandungi senarai berbulet untuk dipaparkan dalam bentuk paparan pepohon boleh lipat atau juntai bawah.
Paparan pepohon menggunakan menu pepohon JavaScript [http://www.destroydrop.com/javascripts/tree dTree], sementara menu juntai bawah menggunakan [http://www.htmldog.com/articles/suckerfish/dropdowns/ Son of Suckerfish]',
);
/** Norwegian Bokmål (norsk (bokmål))
* @author Event
*/
$messages['nb'] = array(
'treeandmenu-desc' => 'Legger til <code><nowiki>#tree-</nowiki></code> og <code><nowiki>#menu-</nowiki></code>parser-funksjoner som inneholder punktlister som fremstilles som sammenleggbare trevisninger eller nedtrekksmenyer.
Trevisningene bruker [http://www.destroydrop.com/javascripts/tree dTree] JavaScript-tremenyen, mens nedtrekksmenyen bruker [http://www.htmldog.com/articles/suckerfish/dropdowns/ Son of Suckerfish]',
);
/** Dutch (Nederlands)
* @author Siebrand
*/
$messages['nl'] = array(
'treeandmenu-desc' => "Voegt de parserfuncties <code><nowiki>#tree</nowiki></code> en <code><nowiki>#menu</nowiki></code> toe die ervoor zorgen dat ongenummerde lijsten worden weergegeven als inklapbare boomstructuren of dropdownmenu's.
Hiervoor worden de componenten [http://www.destroydrop.com/javascripts/tree dTree] en [http://www.htmldog.com/articles/suckerfish/dropdowns/ Son of Suckerfish] gebruikt",
);
/** Polish (polski)
* @author BeginaFelicysym
*/
$messages['pl'] = array(
'treeandmenu-desc' => 'Dodaje funkcje analizator składni <code><nowiki>#tree</nowiki></code> i <code><nowiki>#menu</nowiki></code>, które zawierają listy punktowane do wyświetlenia jako zwijanej drzewidoki lub menu rozwijane.
Drzewidoki używają generowanych przez JavaScript menu drzewiastych [http://www.destroydrop.com/javascripts/tree dTree], a menu rozwijane wykorzystują [http://www.htmldog.com/articles/suckerfish/dropdowns/ Son of Suckerfish]',
);
/** Piedmontese (Piemontèis)
* @author Borichèt
* @author Dragonòt
*/
$messages['pms'] = array(
'treeandmenu-desc' => "A gionta le funsion d'anàlisi <code><nowiki>#tree</nowiki></code> e <code><nowiki>#menu</nowiki></code> ch'a conten-o liste ëd boton da visualisé com erbo strenzìbij o liste a ridò.
J'erbo a deuvro ël la lista për erbo JavaScript [http://www.destroydrop.com/javascripts/tree dTree], e le liste a ridò a deuvro [http://www.htmldog.com/articles/suckerfish/dropdowns/ Son of Suckerfish]",
);
/** Portuguese (português)
* @author <NAME>
*/
$messages['pt'] = array(
'treeandmenu-desc' => 'Adiciona as funções do analisador sintáctico <code><nowiki>#tree</nowiki></code> e <code><nowiki>#menu</nowiki></code>, que contêm listas com marcadores, para serem apresentadas na forma de árvore recolhível ou menus descendentes.
As árvores usam o menu árvore [http://www.destroydrop.com/javascripts/tree dTree] do JavaScript e os menus descendentes usam [http://www.htmldog.com/articles/suckerfish/dropdowns/ Son of Suckerfish]',
);
/** Russian (русский)
* @author <NAME>
*/
$messages['ru'] = array(
'treeandmenu-desc' => 'Добавляет функции парсера <code><nowiki>#tree</nowiki></code> и <code><nowiki>#menu</nowiki></code>, позволяющие выводить списки как раскрываемые деревья или выпадающие меню.
Для раскрывающихся деревьев используется JavaScript-скрипт [http://www.destroydrop.com/javascripts/tree dTree], для выпадающих меню — [http://www.htmldog.com/articles/suckerfish/dropdowns/ Son of Suckerfish]',
);
/** Tagalog (Tagalog)
* @author AnakngAraw
*/
$messages['tl'] = array(
'treeandmenu-desc' => 'Nagdaragdag ng mga tungkuling pambanghay na <code><nowiki>#tree</nowiki></code> at <code><nowiki>#menu</nowiki></code> na naglalaman ng mga listahang napupungluan na ihaharap bilang isang nababaklas na mga tanawin ng puno o mga menu naibabagsak na paibaba.
Ang mga tanawin ng puno ay gumagamit ng [http://www.destroydrop.com/javascripts/tree dTree] ng menu ng puno ng JavaScript, at ang mga menung naibabagsak paibaba ay gumagamit ng [http://www.htmldog.com/articles/suckerfish/dropdowns/ Anak na Lalaki ng Suckerfish]',
);
| f66e81e1f6211024162ac3cf6f006d5c3674d03f | [
"JavaScript",
"PHP"
] | 14 | PHP | Jinsuke/extensions | 8bfdac404cfff5c2cbae0c1a3ece2d74d25024fb | 9556950505c11b742f2f5e460f8aea8621526ecf |
refs/heads/master | <file_sep>import pymongo
from pymongo import MongoClient
from utilix.rundb import pymongo_collection
from utilix.config import Config
import pprint
# Handle to the XENONnT 'runs' collection (credentials resolved by utilix).
rundb = pymongo_collection('runs')
# Minimal example: one $ne clause per tag to exclude, printed for inspection.
excl = ["messy","abandon"]
excl_q = [{"tags.name":{"$ne": e}} for e in excl]
print(excl_q)
# query_excluded = [ {"tags.name": "messy"} ,
# {"tags.name": "abandon"} ]
# include_tags = [{"$regex":"_sr0"},"lt_24h_after_kr"]
# include_tags_query = [{"tags.name": i} for i in include_tags]
# print(include_tags_query)
# #pprint.pprint(rundb.find_one({"$and" : query_excluded, "number":12258}))
# pprint.pprint(rundb.find_one({"$or" : include_tags_query}))
# #pprint.pprint(rundb.find_one({"tags.name":{"$regex":"_sr0"}}) )
# Full query: exclude every "bad" tag AND require at least one include tag
# (a regex matching *_sr0, or the literal lt_24h_after_kr tag).
exclude_tags = ["messy","bad", "nonsr0_configuration", "ramp down", "ramp up", "ramp_down", "ramp_up", "hot_spot","abandon"]
exclude_tags_query = [{"tags.name":{"$ne": e}} for e in exclude_tags]
include_tags = [{"$regex":"_sr0"},"lt_24h_after_kr"]
include_tags_query = [{"tags.name": i} for i in include_tags]
# NOTE(review): run_mode is defined but the find() below hard-codes
# 'tpc_kr83m' instead of using the variable — confirm which is intended.
run_mode ='tpc_kr83m'
coll = list(rundb.find({"$and" : exclude_tags_query,"$or": include_tags_query, "mode":'tpc_kr83m'}))
#coll = list(rundb.find({"$and" : exclude_tags_query}))
print(coll)<file_sep>from datetime import datetime as dt
import plotly.graph_objects as go
import pandas_datareader as pdr
from dash.dependencies import Input
from dash.dependencies import Output
from app.utils import getdata
def register_callbacks(dashapp):
    """Attach the plot-update callback to the given Dash application.

    The callback refreshes 'my-graph' whenever either dropdown changes,
    plotting value vs. time with symmetric error bars.

    FIX(review): removed leftover debug prints ('roro value'/'roro time')
    that spammed the server log on every callback invocation.
    """
    @dashapp.callback(Output('my-graph', 'figure'), [Input('my-dropdown', 'value'), Input('version-dropdown', 'value')])
    def update_graph(selected_dropdown_value, version_value):
        # getdata returns an object exposing .time, .value and .error —
        # assumed to be aligned series; confirm against app.utils.getdata.
        df = getdata(selected_dropdown_value, version_value)
        return {
            'data': [{
                'x': df.time,
                'y': df.value,
                'error_y': dict(
                    type='data',  # error bar sizes are given in data coordinates
                    array=df.error,
                    visible=True)
            }],
            'layout': {'margin': {'l': 40, 'r': 0, 't': 20, 'b': 30}}
        }
<file_sep>import argparse
from resource import RLIMIT_MSGQUEUE
import strax
import straxen
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import sys
st = straxen.contexts.xenonnt()
sys.path +=['../utils/']
import xomlib as xl
def _make_result(run_ids, variable_name, container, timestamp, value, error):
    """Build one XOM result record; key order matches the original schema."""
    return {
        'run_id': int(run_ids[0]),
        'run_ids': run_ids,
        'variable_name': variable_name,
        'container': container,
        'timestamp': int(timestamp / 1e9),  # ns -> s
        'value': value,
        'error': error,
        'chisquared': None,
        'tag': 'test',
        'data': None,
    }


def _save_and_upload(result):
    """Write the record to the local tmp/ area, then upload it to the XOM DB."""
    outfname = result['variable_name'] + '_' + str(result['run_id']) + '_' + 'cont_' + result['container']
    xl.SaveData(result, './algorithms/test_var_2/tmp/' + outfname + '.json')
    xl.UploadDataDict(result, 'dali')


def main():
    """Load event_info for one or more runs, compute cS1/cS2 summary
    statistics and push them to the XOM database.

    FIX(review): the two near-identical result-dict blocks were collapsed
    into _make_result/_save_and_upload (same keys, same key order, same
    file names and upload target).
    """
    parser = argparse.ArgumentParser("RunXom")
    parser.add_argument("runs", nargs='+', help="Run number to process")
    parser.add_argument("--container", help=" will fill the xom data base with the container str", default='unfilled')
    args = parser.parse_args()

    # Zero-pad run numbers to the 6-digit strax run_id convention.
    run_id = [str(r).zfill(6) for r in args.runs]
    dflist = [st.get_df(rid, targets='event_info') for rid in run_id]
    df = pd.concat(dflist, ignore_index=True)

    # Summary statistics over all selected events.
    value = np.float64(df['cs1'].mean())
    value2 = np.float64(df['cs2'].mean())
    rms2 = np.float64(df['cs2'].std())
    timestamp = df['time'].iloc[0]

    # NOTE(review): the original also computed df['cs1'].std() but reported
    # error=0 for the cs1 variable; that behavior is kept — confirm whether
    # the std was meant to be the error, as it is for cs2.
    _save_and_upload(_make_result(run_id, 'test_var_2_a', args.container, timestamp, value, 0))
    _save_and_upload(_make_result(run_id, 'test_var_2_b', args.container, timestamp, value2, rms2))
    return 0


if __name__ == "__main__":
    main()
<file_sep>import strax
import straxen
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from argparse import ArgumentParser
import argparse
import cutax
import sys
sys.path +=['../../../utils/']
import xomlib
def cut_energy(dat, Emin, Emax):
    """Return the entries of *dat* lying in the half-open interval (Emin, Emax]."""
    in_window = (dat > Emin) & (dat <= Emax)
    return dat[in_window]
def press_run(run_id):
    """Count fiducial-volume events of *run_id* in decade-wide energy bins
    (e_ces, 0–100000) and upload the counts to the XOM database.
    """
    # Online context with local rucio storage; the processed-data directory
    # is appended so already-processed runs are found.
    # st = cutax.contexts.xenonnt_online(include_rucio_local=False, include_rucio_remote=True )
    st = cutax.xenonnt_online(_rucio_local_path='/project/lgrandi/rucio', include_rucio_local = True)
    st.storage += [strax.DataDirectory('/project2/lgrandi/xenonnt/processed', provide_run_metadata=True)]
    # Only events passing the fiducial-volume cut are loaded.
    data = st.get_df(run_id, targets =("event_info", "cut_fiducial_volume"),
                     selection_str= ("cut_fiducial_volume"))
    Data_energy = data['e_ces']
    # Decade bin edges; Events[i] counts energies in (liste[i], liste[i+1]].
    liste = [0,1,10,100,1000,10000,100000]
    Events = []
    e_0_100000 = len(cut_energy(Data_energy, 0, 100000))
    for i in range(len(liste)-1):
        Events.append(len(cut_energy(Data_energy, liste[i], liste[i+1])))
    # Package the headline count as the variable value and all bins as data.
    xomresult = xomlib.Xomresult(analysis_name="event_rate",
                                 analysis_version = "v0.0",
                                 variable_name='e_0_100000',
                                 variable_value=e_0_100000,
                                 runid=int(run_id),
                                 data= {"e_0_100000":e_0_100000, "e_0_1":Events[0], "e_1_10":Events[1],"e_10_100":Events[2], "e_100_1000":Events[3], "e_1000_10000":Events[4], "e_10000_100000":Events[5]})
    # Free the (potentially large) event dataframe before the uploads.
    del data
    xomresult.save()
    xomresult.xom_message(success=True)
def main():
    """Command-line entry point: process the run id given as positional argument."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("echo")
    parsed = arg_parser.parse_args()
    print(parsed.echo)
    print("start")
    press_run(parsed.echo)
    print('end')


if __name__ == "__main__":
    main()
<file_sep>#!/usr/bin/env python
import os
from argparse import ArgumentParser
from socket import timeout
import json
from datetime import timezone, datetime, timedelta
import sys
import pprint
import numpy as np
import time
import configparser
import shlex
import subprocess
sys.path +=['../utils/']
import constant
def check_jobs(job_limit):
    """Count this user's queued/running Slurm jobs and compare to *job_limit*.

    Returns True when the job count is within the limit, False otherwise.
    FIX(review): the original ignored job_limit entirely (the comparison was
    commented out and the function returned None) and built an unused
    shlex.split() result; both removed/restored here.
    NOTE(review): the username is hard-coded to 'gaior' — consider
    os.environ['USER'].
    """
    # `squeue` prints one header line plus one line per job; count via wc.
    # The pipe requires shell=True.
    command = "squeue -u gaior | wc --lines"
    process = subprocess.run(command,
                             stdout=subprocess.PIPE,
                             universal_newlines=True, shell=True)
    # Subtract the squeue header line to get the actual job count.
    nr_of_jobs = int(process.stdout) - 1
    print(nr_of_jobs)
    return nr_of_jobs <= job_limit
def main():
    """CLI entry point for quick backend tests (currently only --jobs).

    FIX(review): removed ~65 lines of commented-out dead code (an earlier
    XOM/run-DB polling loop) that obscured the live logic; recover it from
    version control if needed.
    """
    print()
    print("--------------------------------------")
    print("TEST FUNCTION for BACKEND ")
    print("--------------------------------------")
    print()
    parser = ArgumentParser("test_function")
    parser.add_argument("--jobs", help="test the xomdb functions", action='store_true')
    args = parser.parse_args()
    if args.jobs:
        check_jobs(3)


if __name__ == "__main__":
    main()
# # a = 1
# # pprint.pprint(f'last run {last_run_daq}')
# # pprint.pprint(f'last run xom {last_run_xom}')
# # #print("xom data = ", xomdata.find_one())
# # coll = list(rundb.find({"number" : {"$in": [39321, 39323]}, "mode":"tpc_kr83m"},{'number':1}))
# # for x in coll:
# # print(x)
# #def last_run_check():
<file_sep>import subprocess
import shlex
import numpy as np
# Run the helper script and capture everything it prints to stdout.
execcommand = "python output.py"
execcommand = shlex.split(execcommand)
process = subprocess.run(execcommand,
                         stdout=subprocess.PIPE,
                         universal_newlines=True)
a = process.stdout
print(a)
# Extract the text between the first '[' and ']' (a printed numpy array),
# split on spaces and drop the empty tokens left by repeated spaces.
# NOTE(review): fragile — assumes the script prints exactly one 1-D
# integer array; confirm the format of output.py.
b = a[a.find('[')+1 : a.find(']')].strip(' ').split(' ')
new_b = [x for x in b if x != '']
print(new_b)
new_b = np.asarray(new_b,dtype=int)
print(new_b)
<file_sep>#from app import create_app
"""Application entry point."""
from app import create_app

# WSGI servers import this module and pick up `server`.
server = create_app()

if __name__ == "__main__":
    # NOTE(review): debug=True enables the interactive debugger and reloader
    # and must not be used in production deployments.
    # FIX(review): removed surrounding commented-out dead code.
    server.run(host='0.0.0.0', debug=True)
<file_sep>
# credentials:
# InfluxDB connection settings for the XOM dashboard.
# SECURITY(review): an API token must never be committed to source control —
# load it from an environment variable or a secrets file instead.
bucket = "xom"
org = "xenon"
token = "<KEY>
# Store the URL of your InfluxDB instance
url="https://influxdb.dashboards.xenonnt.org"
<file_sep>from utilix.config import Config
from argparse import ArgumentParser
import matplotlib.pyplot as plt
import json
import straxen
def SaveData(result, filename):
    """Serialize *result* to *filename* as JSON.

    FIX(review): the explicit f.close() was redundant — the `with` statement
    already closes the file on exit, including on error.
    """
    with open(filename, 'w') as f:
        json.dump(result, f)
def MyAnalysis(run_number):
    """Template analysis: fill in real computation, then emit one result
    record per monitored quantity to result.json (placeholder values below).
    """
    print("Begin of MyAnalysis")
    """
    Here you place your analysis code
    """
    # Output — all placeholder values until real analysis code is added.
    timestamp = 123456789 # here the average run time
    LY = 12 # a result
    sLY = 1 # its error
    chisquared = 0 # the chisquare of a possible fit
    # Empty figure, saved below as <variable_name>.png for the dashboard.
    fig = plt.figure(figsize=(9,9), dpi=100)
    results = []
    # to repeat this block for each quantity you want to monitor
    result = {}
    result['run_number'] = run_number
    # NOTE(review): 'lightyeld' looks like a typo for 'lightyield' — but it is
    # a database key, so confirm before renaming.
    result['variable_name'] = 'lightyeld'
    result['straxen_version'] = '1.2.3'
    result['strax_version'] = '1.2.3'
    result['name'] = "Light Yield"
    result['unit'] = "[PE/keV]"
    result['timestamp'] = timestamp
    result['value'] = LY
    result['error'] = sLY
    result['chisquared'] = chisquared
    fig.savefig(result['variable_name']+".png")
    results.append(result)
    SaveData(results,"result.json")
def main():
    """Parse the run number from the command line and run MyAnalysis on it.

    FIX(review): removed the unused local `config = Config()` — the parser
    instance was created and immediately discarded.
    """
    parser = ArgumentParser("MyAnalysis")
    parser.add_argument("number", type=int, help="Run number to process")
    args = parser.parse_args()
    MyAnalysis(args.number)


if __name__ == "__main__":
    main()
<file_sep>#!/usr/bin/env python
import os
from argparse import ArgumentParser
from socket import timeout
import pymongo
from pymongo import MongoClient
from utilix.rundb import pymongo_collection
from utilix.config import Config
from bson.json_util import dumps
import json
from datetime import timezone, datetime, timedelta
import strax
import straxen
import sys
import pprint
import numpy as np
import time
import configparser
import shlex
import subprocess
sys.path +=['../utils/']
import constant
import locklib as ll
import dblib as dbl
import importlib
import analysis
analysismodule = importlib.import_module("analysis")
rundb = pymongo_collection('runs')
def get_xom_config(configname='xomconfig.cfg'):
    """Load the XOM configuration file from ../utils/ and return the parser.

    FIX(review): removed a discarded `xomconfig.sections()` call (its result
    was never used) and a pointless local alias of `configname`.
    Note: ConfigParser.read() silently ignores a missing file, so the
    returned parser may be empty.
    """
    xomconfig = configparser.ConfigParser()
    xomconfig.read('../utils/' + configname)
    return xomconfig
def check_jobs(job_limit):
    """Return True while the number of queued Slurm jobs is within *job_limit*.

    FIX(review): CompletedProcess has no `.result` attribute — reading the
    output must use `.stdout`. Also, the command contains a shell pipe, so
    splitting it with shlex and running without a shell passed '|' as a
    literal argument; run through the shell instead (matches the sibling
    implementation of this function elsewhere in the project).
    NOTE(review): the username is hard-coded to 'gaior'.
    """
    command = "squeue -u gaior | wc --lines"
    process = subprocess.run(command,
                             stdout=subprocess.PIPE,
                             universal_newlines=True, shell=True)
    # wc counts the squeue header line as well; subtract it for the job count.
    nr_of_lines = int(process.stdout) - 1
    print(nr_of_lines)
    if nr_of_lines > job_limit:
        return False
    else:
        return True
def main():
    """CLI entry point: exercise individual backend components (config
    parsing, run-DB queries, XOM-DB queries, job monitoring, data
    availability) selected via command-line flags.

    FIX(review): removed a large block of commented-out dead code; declared
    the --data flag (it was read below but never added, so reaching
    `args.data` raised AttributeError); restored the `last_run_daq`
    assignment (it was printed but never assigned — NameError); corrected
    the --jobs help text, which was a copy-paste of --xomdb's.
    """
    print()
    print("--------------------------------------")
    print("TEST FUNCTION for BACKEND ")
    print("--------------------------------------")
    print()
    parser = ArgumentParser("test_function")
    parser.add_argument("--config", help="test the config", action='store_true')
    parser.add_argument("--rundb", help="test the rundb functions", action='store_true')
    parser.add_argument("--xomdb", help="test the xomdb functions", action='store_true')
    parser.add_argument("--jobs", help="test the job monitoring", action='store_true')
    parser.add_argument("--data", help="test data availability via strax", action='store_true')
    parser.add_argument("--verbose", help="Shows informations and statistics about the database", action='store_true')
    args = parser.parse_args()
    # Connect to the XOM measurement database.
    type_of_db = constant.type_of_db
    xomdb = dbl.Xomdb(type_of_db, "measurementxom")
    if args.config:
        # Build one Analysis instance per section of the XOM config file.
        xomconfig = get_xom_config(constant.configname)
        analysis_names = xomconfig.sections()
        print("##########################################" )
        print("Reading the config file: ",xomconfig)
        print("##########################################\n" )
        analysis_list = []
        for analysis_name in analysis_names:
            an = analysis.Analysis(analysis_name)
            an.fill_from_config(xomconfig)
            analysis_list.append(an)
        for an in analysis_list:
            an.print_config()
    if args.rundb:
        # Empty tag lists / run mode degenerate to match-all query terms.
        exclude_tags = []
        include_tags = []
        if not exclude_tags:
            exclude_tags_query = [{}]
        else:
            exclude_tags_query = [{"tags.name":{"$ne": e}} for e in exclude_tags]
        if not include_tags:
            include_tags_query = [{}]
        else:
            include_tags_query = [{"tags.name": i} for i in include_tags]
        run_mode = ''
        if not run_mode:
            run_mode_query = {"$ne": " "}
        else:
            run_mode_query = run_mode
        coll = list(rundb.find({"$and" : exclude_tags_query,"$or": include_tags_query, "mode":run_mode_query, "number": {"$gt":52000} }))
        valid_runs = []
        [valid_runs.append(x['number']) for x in coll]
        print ("QP analysis: ", valid_runs)
        last_run_daq = dbl.get_max_mongodb(rundb, "number")
        print("latest entry in DAQ = ",last_run_daq)
    if args.xomdb:
        last_run_xom = xomdb.get_last_runid()
        print("latest entry in xom = ",last_run_xom)
        last_run_xomv1 = xomdb.get_last_runid_from_var("temp_v1")
        print("latest entry in xom for temp_v1 = ",last_run_xomv1)
        last_run_xomv2 = xomdb.get_last_runid_from_var("temp_v2")
        print("latest entry in xom for temp_v2 = ",last_run_xomv2)
    if args.jobs:
        check_jobs(3)
    if args.data:
        # NOTE(review): `st` and `run_id` are not defined anywhere in this
        # module — this branch still raises NameError; it needs a straxen
        # context and a run id before it can work.
        include_tags = ['_sr1_preliminary']
        exclude_tags = ['messy','bad', 'nonsr0_configuration', 'ramp down', 'ramp up', 'ramp_down', 'ramp_up', 'hot_spot','abandon']
        available_type = ['event_basics', 'peak_basics']
        allruns = st.select_runs(available=available_type,
                                 exclude_tags=exclude_tags,
                                 include_tags=include_tags,)
        print("result of select run", allruns)
        print(st.is_stored(run_id, 'event_basics'))
        print(st.is_stored(run_id, 'peak_basics'))
        check_jobs(3)


if __name__ == "__main__":
    main()
# # a = 1
# # pprint.pprint(f'last run {last_run_daq}')
# # pprint.pprint(f'last run xom {last_run_xom}')
# # #print("xom data = ", xomdata.find_one())
# # coll = list(rundb.find({"number" : {"$in": [39321, 39323]}, "mode":"tpc_kr83m"},{'number':1}))
# # for x in coll:
# # print(x)
# #def last_run_check():
<file_sep>#!/usr/bin/env python
import os
#import numpy as np
import time
import subprocess
import shlex
import sys
sys.path +=['../utils/']
#import locklib as ll
import utils
import constant
import dblib as dbl
import xomlib
import influxdb_client
from influxdb_client.client.write_api import SYNCHRONOUS
from argparse import ArgumentParser
import glob
import time
from utilix.batchq import submit_job
import logging
from logging.handlers import TimedRotatingFileHandler
# Module logger: everything goes to ../logs/proc_runner.log, rotated at
# midnight with a YYYYMMDD suffix per day.
logger = logging.getLogger('proc_runner')
log_format = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
# 10 == logging.DEBUG
log_level = 10
handler = TimedRotatingFileHandler('../logs/proc_runner.log', when="midnight", interval=1)
logger.setLevel(log_level)
formatter = logging.Formatter(log_format)
handler.setFormatter(formatter)
# add a suffix which you want
handler.suffix = "%Y%m%d"
# finally add handler to logger
logger.addHandler(handler)
import pandas as pd
def check_available(analysis_name, container, runid):
    """Return True when *runid* is listed in the availability CSV for the
    given analysis/container pair, False otherwise."""
    availability_csv = constant.availability_files_folder + analysis_name + '_' + container
    known_runs = pd.read_csv(availability_csv)
    # membership test on the 'number' column; `in` already yields a bool
    return runid in known_runs.number.values
def sleep(delay, message=""):
    """Block for *delay* seconds, printing an in-place countdown on stdout."""
    remaining = delay
    while remaining > 0:
        sys.stdout.write("\r")
        sys.stdout.write("waiting " + message + ": {:2d} seconds remaining.".format(remaining))
        sys.stdout.flush()
        time.sleep(1)
        remaining -= 1
    sys.stdout.write("\r waiting " + message + " complete! \n")
def search_in_file(filename, to_search):
    """Return True if the string *to_search* occurs anywhere in *filename*."""
    with open(filename, "r", encoding='utf-8') as handle:
        content = handle.read()
    return to_search in content
def check_jobs(job_name=None):
    """Block until the number of queued SLURM jobs drops below the limit.

    Counts this user's queued jobs via ``squeue`` (optionally filtered by
    *job_name*); when more than ``constant.jobslimit`` are queued, waits two
    minutes and re-checks recursively, filtering on 'xom_job'.
    """
    username = os.environ.get("USER")
    limit = constant.jobslimit
    if job_name:
        command = "squeue -u " + username + " -n " + job_name + " | wc --lines"
    else:
        command = "squeue -u " + username + " | wc --lines"
    # shell=True is required for the pipe into `wc`; the command string is
    # built only from local configuration, not untrusted input.
    # (The previous `execcommand = shlex.split(command)` was dead code: the
    # split result was never used because the run already goes through a shell.)
    process = subprocess.run(command,
                             stdout=subprocess.PIPE,
                             universal_newlines=True, shell=True)
    # squeue always prints one header line, hence the -1.
    nr_of_lines = int(process.stdout) - 1
    print("jobs running = ", nr_of_lines)
    if nr_of_lines > limit:
        sleep(120, "for jobs to finish, now " + str(nr_of_lines) + ' jobs are running')
        check_jobs('xom_job')
# Loop flag for main(); never cleared, so the daemon runs until killed.
stop_condition = True
# Database backend selector (e.g. 'mongodb' / 'influxdb'), from the config.
type_of_db = constant.type_of_db
def main():
    """XOM backend process runner daemon.

    Each pass of the loop: (1) refreshes the per-analysis data-availability
    CSV files, (2) harvests the log files of previously submitted SLURM jobs
    and moves their records from the 'submitted' to the 'done' measurement,
    and (3) submits a SLURM job for every 'todo' record whose input data are
    available.
    """
    print()
    print("--------------------------------------")
    print("XOM BACKEND PROCESS RUNNER module ")
    print("--------------------------------------")
    print()
    parser = ArgumentParser("proc_runner")
    parser.add_argument("--loglevel", type=str, help="Logging level", default='INFO')
    parser.add_argument("--test", help="writes and reads test database", action='store_true')
    parser.add_argument("--skipdatacheck", help="debug purpose, skip the test of the data availability", action='store_true')
    args = parser.parse_args()
    loglevel = args.loglevel
    testmode = args.test
    skipdatacheck = args.skipdatacheck
    # Echo the log on stdout in addition to the rotating file handler.
    ch = logging.StreamHandler(sys.stdout)
    ch.setLevel(loglevel.upper())
    formatterch = logging.Formatter('%(name)-20s - %(levelname)-5s - %(message)s')
    ch.setFormatter(formatterch)
    logger.addHandler(ch)
    count = 0
    # Test mode targets separate measurements so production data stay untouched.
    if testmode:
        xomdataname = "test_xomdata"
        xomtodoname = "test_xomtodo"
        xomdonename = "test_xomdone"
        xomsubmittedname = "test_xomsubmitted"
    else:
        xomdataname = "xomdata"
        xomtodoname = "xomtodo"
        xomdonename = "xomdone"
        xomsubmittedname = "xomsubmitted"
    xomdb = dbl.Xomdb(type_of_db, xomdataname)
    xomdbtodo = dbl.Xomdb(type_of_db, xomtodoname)
    xomdbdone = dbl.Xomdb(type_of_db, xomdonename)
    xomdbsubmitted = dbl.Xomdb(type_of_db, xomsubmittedname)
    # First check the availability. Should be replaced at some point by the
    # query of the availability directly at the todo stage.
    xomconfig = utils.get_xom_config()
    analysis_names = xomconfig.sections()
    analysis_list = []
    while stop_condition:
        if count > 0:
            sleep(constant.exec_period, 'for proc_runner execution period')
        count += 1
        # ------------------------------------------------------------------
        # step 1: refresh the data-availability files for every analysis
        # ------------------------------------------------------------------
        for analysis_name in analysis_names:
            containers = utils.get_from_config(xomconfig, analysis_name, 'container')
            exclude_tags = utils.get_from_config(xomconfig, analysis_name, 'exclude_tags')
            include_tags = utils.get_from_config(xomconfig, analysis_name, 'include_tags')
            available_type = utils.get_from_config(xomconfig, analysis_name, 'available_type')
            # renamed from `args` to avoid shadowing the argparse result above
            cli_args = {}
            if exclude_tags:
                cli_args[' --excluded '] = exclude_tags
            if include_tags:
                cli_args[' --included '] = include_tags
            if available_type:
                cli_args[' --available '] = available_type
            for cont in containers:
                command = "python test_data.py " + ' --container ' + cont + ' --analysis ' + analysis_name
                for key, value in cli_args.items():
                    command += key
                    command += " ".join(value)
                allcommand = constant.singularity_base + cont + " " + command + '\n'
                print(command)
                execcommand = shlex.split(allcommand)
                if not skipdatacheck:
                    process = subprocess.run(execcommand,
                                             stdout=subprocess.PIPE,
                                             universal_newlines=True)
                    logger.debug(process.stdout)
                    logger.debug(process.stderr)
        # ------------------------------------------------------------------
        # step 2: check the status of previously submitted jobs via their logs
        # (the original used `for ... else:`; the loop never breaks, so the
        # else clause always ran and plain sequential code is equivalent)
        # ------------------------------------------------------------------
        submitted_tables = xomdbsubmitted.query_all()
        for table in submitted_tables:
            for p in table:
                pval = p.values
                thevariable_name = pval['variable_name']
                p_analysis_name = pval['analysis_name']
                p_runid = p['runid']
                job_log = constant.job_folder + p_analysis_name + "_" + str(p_runid) + '.log'
                logger.debug(f"testing the status of submitted job from file {job_log}")
                filenames = glob.glob(job_log)
                print(filenames)
                if len(filenames) > 1:
                    logger.error(f'filenames: {filenames}')
                    raise ValueError("two or more job files with the same name")
                if len(filenames) == 1:
                    filename = filenames[0]
                    if search_in_file(filename, "SUCCESSWITHXOM"):
                        logger.info(f"SUCCESS of JOB {job_log}")
                        done_result = xomlib.Xomresult(measurement_name=xomdonename,
                                                       analysis_name=pval['analysis_name'],
                                                       analysis_version=pval['analysis_version'],
                                                       variable_name=pval['variable_name'],
                                                       variable_value=pval[thevariable_name],
                                                       runid=pval['runid'],
                                                       container=pval['container'],
                                                       tag="done")
                        done_result.save()
                        xomdbsubmitted.delete_record(p)
                    elif search_in_file(filename, "FAILEDWITHXOM"):
                        logger.error(f"FAILED JOB {job_log}")
                        # BUGFIX: this branch previously read pval[variable_name],
                        # a name undefined on the first pass (NameError) and
                        # stale afterwards; thevariable_name is the intended key.
                        done_result = xomlib.Xomresult(measurement_name=xomdonename,
                                                       analysis_name=pval['analysis_name'],
                                                       analysis_version=pval['analysis_version'],
                                                       variable_name=pval['variable_name'],
                                                       variable_value=pval[thevariable_name],
                                                       runid=pval['runid'],
                                                       container=pval['container'],
                                                       tag="done_failed")
                        done_result.save()
                        xomdbsubmitted.delete_record(p)
        # ------------------------------------------------------------------
        # step 3: submit a job for every todo record whose data are available
        # ------------------------------------------------------------------
        tables = xomdbtodo.query_all()
        for table in tables:
            for p in table:
                check_jobs()  # throttle: block while too many jobs are queued
                pval = p.values
                logger.debug("submitted table entry", pval)
                thevariable_name = pval['variable_name']
                p_analysis_name = pval['analysis_name']
                p_container = pval['container']
                p_runid = p['runid']
                # check if the data are available for that runid
                is_available = check_available(p_analysis_name, p_container, p_runid)
                sbatch_filename = constant.job_folder + p_analysis_name + "_" + str(p_runid)
                if is_available:
                    code_folder = "cd " + constant.analysis_code_folder + xomconfig.get(p_analysis_name, 'folder') + " \n"
                    print(code_folder)
                    command = "python " + xomconfig.get(p_analysis_name, 'command')
                    print(command)
                    analysis_command = code_folder + command.replace('[run]', str(p_runid).zfill(6))
                    print(analysis_command)
                    if xomconfig.has_option(p_analysis_name, 'mem_per_cpu'):
                        mem_per_cpu = int(xomconfig.get(p_analysis_name, 'mem_per_cpu'))
                    else:
                        mem_per_cpu = 1000
                    log_filename = constant.job_folder + p_analysis_name + "_" + str(p_runid) + '.log'
                    logger.info(f'data for run {p_runid} is available, will submit the job {sbatch_filename}')
                    submit_job(jobstring=analysis_command,
                               log=log_filename,
                               partition=constant.job_partition,
                               qos=constant.job_partition,
                               jobname='xom_job',
                               sbatch_file=sbatch_filename,
                               dry_run=False,
                               mem_per_cpu=mem_per_cpu,
                               container='xenonnt-development.simg',
                               cpus_per_task=1,
                               hours=None,
                               node=None,
                               exclude_nodes=None,
                               )
                    logger.info(f"submitted JOB {sbatch_filename}")
                    submitted_result = xomlib.Xomresult(measurement_name=xomsubmittedname,
                                                        analysis_name=pval['analysis_name'],
                                                        analysis_version=pval['analysis_version'],
                                                        variable_name=pval['variable_name'],
                                                        variable_value=pval[thevariable_name],
                                                        runid=pval['runid'],
                                                        container=pval['container'],
                                                        tag="submitted")
                    submitted_result.save()
                    xomdbtodo.delete_record(p)
                else:
                    logger.info(f'data for {p_runid} in NOT yet available, will wait to submit the job {sbatch_filename}')
# Script entry point for the proc_runner daemon.
if __name__ == "__main__":
    main()
<file_sep>#!/usr/bin/env python
import os
import pymongo
from pymongo import MongoClient
from utilix.rundb import pymongo_collection
from utilix.config import Config
import sys
import pprint
import numpy as np
import time
import constant
from argparse import ArgumentParser
########################
### connection to DB ###
########################
def connect_to_DB(database, server):
    """Return a client connected to *database* ('influxdb' or 'mongodb'),
    or None when the connection could not be established.

    NOTE(review): the influxdb branch relies on `influxdb_client` and `info`,
    neither of which is imported/defined in this module — confirm before use.
    """
    client = None  # ensure a defined return value on failure
    if database == 'influxdb':
        try:
            client = influxdb_client.InfluxDBClient(
                url=info.url,
                token=info.token,
                org=info.org
            )
        except Exception:
            print('could not connect to the DB {} '.format(database))
    # BUGFIX: was `elif database = 'mongodb':` — a SyntaxError.
    elif database == 'mongodb':
        try:
            # BUGFIX: was `client == MongoClient(...)` (a comparison whose
            # result was discarded, leaving `client` unbound).
            client = MongoClient(constant.serveraddress[server], 27017)
        except Exception:
            print('could not connect to the DB from {} with address {} '.format(server, constant.serveraddress[server]))
    return client
# rundb = pymongo_collection('runs')
# db_client = connect_to_DB('dali')
# xomdb = db_client['xom']
# Accessing to Data collection
# NOTE(review): the `xomdb` setup above is commented out, so the two lines
# below raise NameError at import time — this module cannot be imported
# as-is; confirm and restore the connection setup.
xomvariablesdb = xomdb['variables']
xomdatadb = xomdb['data']
#def get_max_influxdb(client, variable_name, query=None):
def get_max(database, col, variable_name, query=None):
    '''Return the maximum value of *variable_name*.

    *col* is either a pymongo collection (database == 'mongodb') or an
    influxdb client (any other value of *database*).  *query* optionally
    restricts the search: a mongo filter dict, or a complete Flux query
    string.  Returns None when the lookup fails.
    '''
    max_value = None  # avoids an UnboundLocalError on failure (and no longer shadows builtin max)
    if database == 'mongodb':
        try:
            # Sort descending on the variable and take the first document.
            # BUGFIX: previously referenced the undefined name
            # `name_of_variable` instead of the `variable_name` parameter.
            if query:
                max_value = col.find(query).sort(variable_name, -1).limit(1)[0][variable_name]
            else:
                max_value = col.find({}).sort(variable_name, -1).limit(1)[0][variable_name]
        except Exception:
            print("error in get max")
    else:
        # influxdb path.  BUGFIX: previously used an undefined `client`, and
        # concatenated an int into the Flux string (TypeError); the query_api
        # object was built but never used.
        query_api = col.query_api()
        if query:
            fluxquery = query
        else:
            fluxquery = ('from(bucket:"xom") |> range(start: ' + str(-constant.query_period)
                         + 'd) |> filter(fn: (r) => r.variable_name ==' + variable_name
                         + ') |> sort(columns: ["runid"] ) ')
        max_value = query_api.query(fluxquery)
    return max_value
def reset_xomdb(col=None):
    """Delete every record with a positive run_id from collection *col*.

    Raises ValueError when no collection is given (previously this crashed
    with an opaque AttributeError on None).
    """
    if col is None:
        raise ValueError("reset_xomdb requires an explicit collection")
    col.delete_many({'run_id': {"$gt": 0}})
def find_one(col=None):
    """Print one sample document from *col* (defaults to xomdatadb)."""
    target = col if col else xomdatadb
    print(target.find_one())
def latest(col):
    """Print the most recently inserted document of *col* (descending _id)."""
    newest = col.find().sort("_id", -1).limit(1)[0]
    print(newest)
def main():
    """Command-line utilities for inspecting/resetting the XOM database."""
    print()
    print("--------------------------------------")
    print("XOM DB LIB - Utilities for the XOM Database")
    print("--------------------------------------")
    print()
    parser = ArgumentParser("xomlib")
    parser.add_argument("server", nargs='?', type=str, default='dali', help="what server the script is run on, will change the address to connect for mongodb")
    # BUGFIX: this positional previously combined type=str/nargs with
    # action='store_true', which argparse rejects with a TypeError.
    parser.add_argument("database", nargs='?', type=str, default='mongodb', help="either mongodb or influxdb")
    parser.add_argument("collection", nargs='?', type=str, default='xom', help="what server the script is run on, will change the address to connect for mongodb")
    parser.add_argument("--reset", help="reset database, only for test purposes", action='store_true')
    parser.add_argument("--find_one", help="just showing one item", action='store_true')
    parser.add_argument("--latest", help="showing the latest item", action='store_true')
    args = parser.parse_args()
    server = args.server
    # BUGFIX: reset_xomdb() and latest() were called without the collection
    # they require; the module-level data collection is passed explicitly.
    # NOTE(review): xomdatadb depends on DB setup that is currently commented
    # out at module level — confirm it is restored before using this CLI.
    if args.reset:
        reset_xomdb(xomdatadb)
    if args.latest:
        latest(xomdatadb)
    if args.find_one:
        find_one(xomdatadb)
# Script entry point for the DB utility CLI.
if __name__ == "__main__":
    main()
<file_sep>#from utilix.config import Config
from argparse import ArgumentParser
import matplotlib.pyplot as plt
import numpy as np
import json
import random
#import straxen
# dummy range number up to 3000
runrange = range(39000,39300)
# light yield range [mean, sigma]:
lyrange = {'mean':15,'sigma':5}
# charge yield range [mean, sigma]:
cyrange = {'mean':10,'sigma':5}
# electron lifetime [mean, sigma]:
eltrange = {'mean':500,'sigma':20}
#container:
containers = ['development','2022.02.3','2022.02.2']
imagefolder = '/xom/images/'
def SaveData(result, filename, mode='w'):
    """Write *result* (JSON-serialisable) to *filename* in the given mode.

    After dumping, back-to-back JSON arrays produced by repeated append
    calls ('][') are merged into a single array (',').
    """
    with open(filename, mode) as f:
        json.dump(result, f)
    # Merge consecutive arrays written by append mode into one array.
    with open(filename, 'r') as f:
        content = f.read()
    content = content.replace('][', ',')
    # BUGFIX: the final close was written as `f.close` (attribute access,
    # never called); explicit closes inside `with` were also redundant —
    # the context managers now handle closing everywhere.
    with open(filename, 'w') as f:
        f.write(content)
def MyAnalysis(analysis, numberofrun, mode='a', straxversion='2.2.1'):
    """Generate dummy monitoring results for *numberofrun* runs.

    *analysis* selects which quantity is fabricated ('lightyield',
    'chargeyield' or 'electronlifetime').  Results are written/appended
    (depending on *mode*) to result.json; analyses with figures save a
    histogram under *imagefolder*.
    """
    print("Begin of MyAnalysis")
    runs = runrange[:numberofrun]
    results = []
    for run in runs:
        # dummy timestamps: start 1/1/2020, consecutive one-hour runs
        timestamp = 1577833200 + run * 3600
        if analysis == 'lightyield':
            LY = np.random.normal(lyrange['mean'], lyrange['sigma'], 1)[0]  # a result
            sLY = 0.1 * LY  # its error
            chisquared = 0  # the chisquare of a possible fit
            result = {}
            run_id = run
            result['run_id'] = run_id
            result['run_ids'] = [run_id]
            result['variable_name'] = 'lightyield'
            result['straxen_version'] = random.choice(containers)
            # BUGFIX: strax_version was never set in this branch, so building
            # the figure name below raised a KeyError.
            result['strax_version'] = straxversion
            result['timestamp'] = timestamp
            result['value'] = LY
            result['error'] = sLY
            result['chisquared'] = chisquared
            result['tag'] = 'test'
            data = np.random.normal(LY, sLY, 1000)
            fig = plt.figure(figsize=(9, 9), dpi=100)
            plt.hist(data)
            figname = result['variable_name'] + '_' + str(result['run_id']) + '_' + 'strax' + result['strax_version'] + '_' + 'straxen' + result['straxen_version'] + ".png"
            fig.savefig(imagefolder + figname)
            result['figname'] = figname
            print(result)
            results.append(result)
        if analysis == 'chargeyield':
            CY = np.random.normal(cyrange['mean'], cyrange['sigma'], 1)[0]  # a result
            sCY = 0.1 * CY  # its error
            chisquared = 0  # the chisquare of a possible fit
            result = {}
            run_id = run
            result['run_id'] = run_id
            result['run_ids'] = [run_id]
            result['variable_name'] = 'chargeyield'
            # BUGFIX: was random.choice(straxenrange) — an undefined name.
            result['straxen_version'] = random.choice(containers)
            result['strax_version'] = straxversion
            result['timestamp'] = timestamp
            result['value'] = CY
            result['error'] = sCY
            result['chisquared'] = chisquared
            result['tag'] = 'test'
            results.append(result)
        if analysis == 'electronlifetime':
            ELT = np.random.normal(eltrange['mean'], eltrange['sigma'], 1)[0]  # a result
            sELT = 0.1 * ELT  # its error
            chisquared = 0  # the chisquare of a possible fit
            result = {}
            run_id = run
            result['run_id'] = run_id
            result['run_ids'] = [run_id]
            result['variable_name'] = 'electronlifetime'
            # BUGFIX: was random.choice(straxenrange) — an undefined name.
            result['straxen_version'] = random.choice(containers)
            result['strax_version'] = straxversion
            result['timestamp'] = timestamp
            result['value'] = ELT
            result['error'] = sELT
            result['chisquared'] = chisquared
            result['tag'] = 'test'
            data = np.random.normal(ELT, sELT, 1000)
            fig = plt.figure(figsize=(9, 9), dpi=100)
            plt.hist(data, histtype='step')
            figname = result['variable_name'] + '_' + str(result['run_id']) + '_' + 'strax' + result['strax_version'] + '_' + 'straxen' + result['straxen_version'] + ".png"
            fig.savefig(imagefolder + figname)
            result['figname'] = figname
            results.append(result)
    print(results)
    SaveData(results, "result.json", mode)
def main():
    """CLI wrapper producing dummy monitoring data via MyAnalysis()."""
    parser = ArgumentParser("MyAnalysis")
    parser.add_argument("numberofrun", type=int, help="number of runs to process")
    parser.add_argument("container", type=str, help="fake container version")
    parser.add_argument("analysis", type=str, choices=['lightyield', 'chargeyield', 'electronlifetime'], help="name of variable to be process")
    parser.add_argument("--overwrite", help="overwrite result.json instead of appending", action='store_true')
    # BUGFIX: args.straxversion was read below but the option was never
    # declared, which raised an AttributeError at runtime.
    parser.add_argument("--straxversion", type=str, default='2.2.1', help="strax version tag attached to the results")
    args = parser.parse_args()
    numberofrun = args.numberofrun
    analysis = args.analysis
    overwrite = args.overwrite
    straxversion = args.straxversion
    if overwrite:
        MyAnalysis(analysis, numberofrun, 'w', straxversion)
    else:
        MyAnalysis(analysis, numberofrun, 'a', straxversion)
# Script entry point for the dummy-analysis generator.
if __name__ == "__main__":
    main()
<file_sep>"""
Example of using fcntl.flock for locking file. Some code inspired by filelock module.
copy pasted from jirihnidek/flock_example.py
"""
import os
import fcntl
import time
def acquire(lock_file, timeout):
    ''' Try to take an exclusive flock on *lock_file* for up to *timeout* seconds.

    Returns the locked file descriptor, or None when the lock could not be
    obtained in time.  The descriptor must be handed back to release().
    '''
    # BUGFIX: O_CREAT added — os.open(path, O_APPEND) alone raised
    # FileNotFoundError when the lock file did not exist yet.
    open_mode = os.O_APPEND | os.O_CREAT
    fd = os.open(lock_file, open_mode)
    pid = os.getpid()
    lock_file_fd = None
    start_time = current_time = time.time()
    while current_time < start_time + timeout:
        try:
            # The LOCK_EX means that only one process can hold the lock
            # The LOCK_NB means that the fcntl.flock() is not blocking
            # and we are able to implement termination of while loop,
            # when timeout is reached.
            # More information here:
            # https://docs.python.org/3/library/fcntl.html#fcntl.flock
            fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
        except (IOError, OSError):
            pass
        else:
            lock_file_fd = fd
            break
        print(f' {pid} waiting for lock')
        time.sleep(1.0)
        current_time = time.time()
    if lock_file_fd is None:
        os.close(fd)
    return lock_file_fd
def release(lock_file_fd):
    """Unlock and close a descriptor previously returned by acquire().

    The lock file itself is deliberately left on disk:
    https://github.com/benediktschmitt/py-filelock/issues/31
    https://stackoverflow.com/questions/17708885/flock-removing-locked-file-without-race-condition
    """
    fcntl.flock(lock_file_fd, fcntl.LOCK_UN)
    os.close(lock_file_fd)
    return None
def main():
    """Demo: grab the example lock, hold it briefly, then let it go.
    Run two copies concurrently to watch one of them wait."""
    pid = os.getpid()
    print(f'{pid} is waiting for lock')
    lock_name = 'myfile.lock'
    fd = acquire(lock_name, 5)
    if fd is None:
        print(f'ERROR: {pid} lock NOT acquired')
        return -1
    print(f"{pid} lock acquired...")
    time.sleep(2.0)
    release(fd)
    print(f"{pid} lock released")
# You can run it using: python ./flock_example.py & python ./flock_example.py
# Script entry point for the flock demo.
if __name__ == '__main__':
    main()
<file_sep>from flask_login import LoginManager
from flask_migrate import Migrate
from flask_sqlalchemy import SQLAlchemy
from flask_pymongo import PyMongo
from flask_simpleldap import LDAP
# Shared Flask extension singletons; each is expected to be bound to the
# application later (typically via init_app() in an application factory).
db = SQLAlchemy()
migrate = Migrate()
login = LoginManager()
mongo = PyMongo()
ldap = LDAP()
<file_sep>import pandas as pd
from app.extensions import mongo
import numpy as np
from plotly.subplots import make_subplots
import plotly.graph_objects as go
def getdata_from_variable(variablename):
    """Return all 'data' documents for *variablename* as a DataFrame."""
    cursor = mongo.db['data'].find({"variable_name": variablename})
    return pd.DataFrame(list(cursor))
def getvariables():
    """Return the content of the 'variables' collection as a DataFrame."""
    cursor = mongo.db['variables'].find()
    df = pd.DataFrame(list(cursor))
    # BUGFIX: debug print previously said 'varibale ='; typo corrected.
    print('variable = ', df)
    return df
def getdata(current_variable, current_version):
    """Collect the *current_variable* process block from every krypton
    calibration document of collection *current_version*."""
    docs = mongo.db[current_version].find({"info.source": "kr", "info.type": "calibration"})
    rows = [doc['processes'][current_variable] for doc in docs]
    return pd.DataFrame.from_dict(rows, orient='columns')
def getalldata():
    """Return every document of the 'data' collection as a DataFrame."""
    mydata = mongo.db['data'].find()
    df = pd.DataFrame(list(mydata))
    # BUGFIX/perf: a distinct('strax_version') round-trip was computed here
    # and immediately discarded; removed (see getstraxversion() for that).
    return df
def getstraxversion():
    """Return the distinct strax_version values present in 'data'."""
    cursor = mongo.db['data'].find()
    return cursor.distinct('strax_version')
import dash_html_components as html
def make_dash_table(selection, df):
    """Return a Dash definition of an HTML table for the rows of the Pandas
    dataframe *df* whose NAME is in *selection*."""
    subset = df.loc[df["NAME"].isin(selection)]
    table = []
    for _, row in subset.iterrows():
        cells = [
            html.Td([row["NAME"]]),
            html.Td([html.Img(src=row["IMG_URL"])]),
            html.Td([row["FORM"]]),
            html.Td([html.A(href=row["PAGE"], children="Datasheet", target="_blank")]),
        ]
        table.append(html.Tr(cells))
    return table
def _create_axis(axis_type, variation="Linear", title=None):
"""
Creates a 2d or 3d axis.
:params axis_type: 2d or 3d axis
:params variation: axis type (log, line, linear, etc)
:parmas title: axis title
:returns: plotly axis dictionnary
"""
if axis_type not in ["3d", "2d"]:
return None
default_style = {
"background": "rgb(255, 255, 255)",
"gridcolor": "rgb(230, 230, 230)",
"zerolinecolor": "rgb(0, 0,0)",
}
if axis_type == "3d":
return {
"showbackground": True,
"backgroundcolor": default_style["background"],
"gridcolor": default_style["gridcolor"],
"title": title,
"type": variation,
"zerolinecolor": default_style["zerolinecolor"],
}
if axis_type == "2d":
return {
"backgroundcolor": "rgb(255,255,255)",
"gridcolor": default_style["gridcolor"],
"title": title,
"zerolinecolor": default_style["zerolinecolor"],
"color": "#000000",
}
def _black_out_axis(axis):
axis["showgrid"] = True
axis["zeroline"] = True
axis["color"] = "rgb(0,0,0)"
return axis
def _create_layout(layout_type, xlabel, ylabel):
    """Return a dash plot layout for *layout_type* ('scatter3d',
    'histogram2d' or 'scatter')."""
    layout = {
        "font": {"family": "Raleway", "size": 18, "color": "#7f7f7f"},
        "hovermode": "closest",
        "margin": {"r": 50, "t": 20, "l": 100, "b": 100},
        "showlegend": False,
    }
    if layout_type == "scatter3d":
        layout["scene"] = {
            "xaxis": _create_axis(axis_type="3d", title=xlabel),
            "yaxis": _create_axis(axis_type="3d", title=ylabel),
            "zaxis": _create_axis(axis_type="3d", title=xlabel, variation="log"),
            "camera": {
                "up": {"x": 0, "y": 0, "z": 1},
                "center": {"x": 0, "y": 0, "z": 0},
                "eye": {"x": 0.08, "y": 2.2, "z": 0.08},
            },
        }
    elif layout_type in ("histogram2d", "scatter"):
        # both 2d layouts share blacked-out axes
        layout["xaxis"] = _black_out_axis(_create_axis(axis_type="2d", title=xlabel))
        layout["yaxis"] = _black_out_axis(_create_axis(axis_type="2d", title=ylabel))
        if layout_type == "histogram2d":
            # dark theme for the 2d histogram only
            layout["plot_bgcolor"] = "black"
            layout["paper_bgcolor"] = "black"
            layout["font"]["color"] = "white"
    return layout
def create_plot(
    x,
    xlabel,
    y,
    ylabel,
    error,
    figname
):
    """Assemble a marker scatter trace (with y error bars) plus its layout."""
    trace = {
        "mode": "markers",
        "coloraxis": "black",
        "x": x,
        "y": y,
        "error_y":
            dict(
                type='data',  # value of error bar given in data coordinates
                array=error,
                visible=True),
        "text": figname
    }
    return {"data": [trace], 'layout': _create_layout("scatter", xlabel, ylabel)}
def create_legend(title, unit):
    """Format an axis legend as 'title [unit]'."""
    return f"{title} [{unit}]"
def create_plot_with_runid(
    x,
    xrunid,
    xlabel,
    y,
    ylabel,
    yunit,
    error,
    figname
):
    """Build a plotly figure of y vs time with a secondary run-id x axis.

    *x* holds unix timestamps in seconds (converted to datetimes below) and
    *xrunid* the corresponding run ids; the same *y*/*error* values are drawn
    against both axes, with the run-id axis overlaying the time axis on top.
    """
    fig = make_subplots(rows=1, cols=1)
    # vertical_spacing=0.02
    x = pd.to_datetime(x, unit='s')
    # trace on the primary (time) axis
    fig.add_trace(go.Scatter(mode='markers',x=x, y=y, error_y=dict(array=error),xaxis="x1"))
    # identical trace bound to the secondary (run-id) axis
    fig.add_trace(go.Scatter(mode='markers',x=xrunid, y=y, error_y=dict(array=error),xaxis="x2",line=None))
    # xaxis2 overlays xaxis1 at the top (position=1); ranges are pinned so the
    # two axes stay aligned point-for-point.
    fig.update_layout(height=500, width=1000,
                      yaxis=dict(title= create_legend(ylabel, yunit)),
                      xaxis1=dict(position=1, range=[np.min(x), np.max(x)], title=dict(text=xlabel) ) ,
                      xaxis2=dict(position =1, range=[np.min(xrunid), np.max(xrunid)], overlaying='x',showgrid=False,title='Run ID'),
                      font={"family": "Raleway", "size":18, "color":"black"},showlegend= False)
    # hover text on the first trace points to the associated figure file
    fig["data"][0]["text"] = figname
    # layout = _create_layout("scatter", xlabel, ylabel)
    return fig
def create_plot_errorx(
    x,
    xlabel,
    y,
    ylabel,
    error,
    errorx,
    figname
):
    """Assemble a marker scatter trace with error bars on BOTH axes, plus a
    scatter layout."""
    trace = {
        "mode": "markers",
        "coloraxis": "black",
        "x": x,
        "y": y,
        "error_y":
            dict(
                type='data',  # value of error bar given in data coordinates
                array=error,
                visible=True),
        "error_x":
            dict(
                type='data',  # value of error bar given in data coordinates
                array=errorx,
                visible=True),
        "text": figname
    }
    return {"data": [trace], 'layout': _create_layout("scatter", xlabel, ylabel)}
<file_sep>import pymongo
from pymongo import MongoClient
from utilix.rundb import pymongo_collection
from utilix.config import Config
from bson.json_util import dumps
import json
import logging
import sys
rundb = pymongo_collection('runs')
sys.path +=['../utils/']
import xomlib
import dblib as dbl
import time
import constant
# create logger
#module_logger = logging.getLogger('proc_compare.analysis')
class Analysis:
    """Description of one configured XOM analysis.

    Holds everything parsed from one section of the XOM config file
    (variables, containers, run-selection criteria, job command) and knows
    how to select valid runs from the run database and queue 'todo' records
    for them.
    """

    def __init__(self, name):
        self.analysis_name = name
        self.analysis_version = ""
        self.variable_list = []
        self.container_list = []
        self.exclude_tags_list = []
        self.include_tags_list = []
        self.available_type_list = []
        self.detectors = []
        self.runwise = False
        self.analysis_path = ""
        self.run_mode = ""
        self.command = ""
        self.result = None
        self.mem_per_cpu = 1000  # default SLURM memory request [MB]
        self.logger = logging.getLogger('proc_compare.analysis.' + self.analysis_name)
        self.min_run = None
        self.max_run = None
        # Echo this analysis' log records on stdout as well.
        ch = logging.StreamHandler(sys.stdout)
        ch.setLevel(logging.DEBUG)
        formatterch = logging.Formatter('%(name)-20s - %(levelname)-5s - %(message)s')
        ch.setFormatter(formatterch)
        self.logger.addHandler(ch)

    def fill_from_config(self, xomconfig):
        """Populate every field from the ConfigParser section named after
        this analysis; optional keys keep their defaults when absent."""
        self.analysis_version = xomconfig.get(self.analysis_name, 'analysis_version')
        containerlist = xomconfig.get(self.analysis_name, 'container')
        self.container_list = containerlist.split(',')
        if xomconfig.has_option(self.analysis_name, 'exclude_tags'):
            exclude_tags_list = xomconfig.get(self.analysis_name, 'exclude_tags')
            self.exclude_tags_list = exclude_tags_list.split(',')
        if xomconfig.has_option(self.analysis_name, 'include_tags'):
            include_tags_list = xomconfig.get(self.analysis_name, 'include_tags')
            self.include_tags_list = include_tags_list.split(',')
        if xomconfig.has_option(self.analysis_name, 'available_type'):
            available_type_list = xomconfig.get(self.analysis_name, 'available_type')
            self.available_type_list = available_type_list.split(',')
        if xomconfig.has_option(self.analysis_name, 'detectors'):
            detectors = xomconfig.get(self.analysis_name, 'detectors')
            self.detectors = detectors.split(',')
        variablelist = xomconfig.get(self.analysis_name, 'variable_name')
        self.variable_list = variablelist.split(',')
        self.runwise = xomconfig.getboolean(self.analysis_name, 'runwise')
        self.folder = xomconfig.get(self.analysis_name, 'folder')
        self.command = xomconfig.get(self.analysis_name, 'command')
        if xomconfig.has_option(self.analysis_name, 'run_mode'):
            self.run_mode = xomconfig.get(self.analysis_name, 'run_mode')
        if xomconfig.has_option(self.analysis_name, 'min_run'):
            self.min_run = int(xomconfig.get(self.analysis_name, 'min_run'))
        if xomconfig.has_option(self.analysis_name, 'max_run'):
            self.max_run = int(xomconfig.get(self.analysis_name, 'max_run'))

    def print_config(self):
        """Pretty-print the parsed configuration of this analysis."""
        print(f"##### Analysis: {self.analysis_name} version {self.analysis_version} ##########")
        print("variable list =", self.variable_list)
        print("container list =", self.container_list)
        print("exclude_tags list =", self.exclude_tags_list)
        print("include_tags list =", self.include_tags_list)
        print("runwise = ", self.runwise)
        print("command =", self.command)
        print("###################################################")

    def produce_job_filename(self):
        """Return a (millisecond-timestamp based) unique job-script name."""
        utc_ts = int(time.time() * 1000)
        job_filename = self.analysis_name + "_" + str(utc_ts) + '.sh'
        return job_filename

    def write_job_file(self, job_filename, container, command):
        """Write a SLURM submission script derived from the template file
        constant.example_sub, substituting container, script, run dir and
        log paths for this analysis."""
        with open(constant.example_sub, 'r') as f:
            new_lines = []
            lines = f.readlines()
            for l in lines:
                if "CONTAINER_PATH=" in l:
                    ls = l.split("=")
                    new_line = ls[0] + '=\"' + constant.container_path + container + '\"' + "\n"
                elif "SCRIPT=" in l:
                    ls = l.split("=")
                    new_line = ls[0] + '=\"' + constant.analysis_code_folder + self.folder + command + '\"' + "\n"
                elif "RUNDIR=" in l:
                    ls = l.split("=")
                    new_line = ls[0] + '=\"' + constant.analysis_code_folder + self.folder + '\"' + "\n"
                    new_line += "cd $RUNDIR" + "\n"
                elif "SBATCH --output=" in l:
                    ls = l.split("=")
                    new_line = ls[0] + '=' + constant.job_folder + job_filename[:-3] + "-%j.out" + "\n"
                elif "SBATCH --error=" in l:
                    ls = l.split("=")
                    new_line = ls[0] + '=' + constant.job_folder + job_filename[:-3] + "-%j.err" + "\n"
                else:
                    new_line = l
                new_lines.append(new_line)
        with open(constant.job_folder + job_filename, 'w+') as f:
            for l in new_lines:
                f.write(l)

    def get_valid_runs(self, last_xom, last_daq):
        """Return run numbers in (last_xom, last_daq] satisfying this
        analysis' tag / detector / mode / run-range criteria."""
        if not self.exclude_tags_list:
            exclude_tags_query = [{}]
        else:
            exclude_tags_query = [{"tags.name": {"$ne": e}} for e in self.exclude_tags_list]
        if not self.include_tags_list:
            include_tags_query = [{}]
        else:
            include_tags_query = [{"tags.name": i} for i in self.include_tags_list]
        if not self.run_mode:
            run_mode_query = {"$ne": " "}
            print("runmode query = ", run_mode_query)
        else:
            run_mode_query = self.run_mode
            print("runmode query = ", run_mode_query)
        if not self.detectors:
            detectors_query = [{}]
        else:
            detectors_query = [{"detectors": i} for i in self.detectors]
        if not self.min_run:
            min_run_query = {"number": {"$gt": last_xom}}
        else:
            min_run = max(last_xom, self.min_run)
            print("min_run = ", min_run)
            min_run_query = {"number": {"$gt": min_run}}
        if not self.max_run:
            max_run_query = {"number": {"$lte": last_daq}}
        else:
            max_run = min(last_daq, self.max_run)
            print("max_run = ", max_run)
            max_run_query = {"number": {"$lte": max_run}}
        # BUGFIX: the previous filter dict used the keys "$and" and "$or"
        # twice each; Python keeps only the last occurrence of a duplicate
        # dict key, so the exclude-tags and include-tags criteria were
        # silently dropped.  All clauses are now combined under a single
        # top-level "$and".
        query = {
            "$and": exclude_tags_query + [
                {"$or": include_tags_query},
                {"$or": detectors_query},
                min_run_query,
                max_run_query,
            ],
            "mode": run_mode_query,
        }
        coll = list(rundb.find(query))
        valid_runs = []
        [valid_runs.append(x['number']) for x in coll]
        print(self.analysis_name, " analysis valid runs : ", valid_runs)
        return valid_runs

    def produce_list_of_runs(self, last_xom, last_daq, test=False):
        """Queue one 'todo' record per valid run / container combination."""
        valid_runs = self.get_valid_runs(last_xom, last_daq)
        if test:
            measurement_name = 'test_xomtodo'
        else:
            measurement_name = 'xomtodo'
        if valid_runs:
            for cont in self.container_list:
                valid_runs_str = str(valid_runs).strip('[]')
                self.logger.info('in cont %s, appending new command for runs: %s', cont, valid_runs_str)
                for r in valid_runs:
                    self.logger.info(f'producing job file for runid {r} for container {cont}')
                    command = self.command.replace('[run]', str(r).zfill(6))
                    job_filename = self.produce_job_filename()
                    todo_result = xomlib.Xomresult(measurement_name=measurement_name,
                                                   analysis_name=self.analysis_name,
                                                   analysis_version=self.analysis_version,
                                                   variable_name="_".join(self.variable_list),
                                                   variable_value=job_filename,
                                                   runid=r,
                                                   container=cont,
                                                   tag="todo")
                    todo_result.save()
                    # self.write_job_file(job_filename, cont, command)
        else:
            self.logger.info(f"no valid run analysis {self.analysis_name}")

    def test_log(self):
        """Emit one record per level to verify the logger wiring."""
        self.logger.info("just testing level INFO")
        self.logger.debug("just testing level DEBUG")
        self.logger.error("just testing level ERROR")
# class test_scada(Analysis):
# class light_yield(Analysis):
# def produce_list_of_runs(self,list_of_runs):
# list_of_command = []
# run_max = 50112
# exclude_tags = self.exclude_tags_list
# #["messy","bad", "nonsr0_configuration", "ramp down", "ramp up", "ramp_down", "ramp_up", "hot_spot","abandon"]
# exclude_tags_query = [{"tags.name":{"$ne": e}} for e in exclude_tags]
# include_tags = ["_sr1_preliminary"]
# include_tags_query = [{"tags.name": i} for i in include_tags]
# run_mode ='tpc_kr83m'
# coll = list(rundb.find({"$and" : exclude_tags_query,"$or": include_tags_query, "mode":run_mode}))
# valid_runs = []
# [valid_runs.append(x['number']) for x in coll]
# print ("QP analysis: ", valid_runs)
# valid_runs.sort()
# valid_runs = list(filter(lambda r: (r > self.min_run) & (r < run_max), valid_runs) )
# print(valid_runs)
# if valid_runs:
# self.logger.info("looping on valid runs")
# for cont in self.container_list:
# valid_runs_str = str(valid_runs).strip('[]')
# self.logger.info('in cont %s, appending new command for runs: %s', cont, valid_runs_str)
# for r in valid_runs:
# self.logger.info(f'producing job file for runid {r} for container {cont}')
# command = self.command.replace('[run]',str(r).zfill(6))
# list_of_command.append(self.command.replace('[run]',str(r)) )
# job_filename = self.produce_job_filename()
# todo_result = xomlib.Xomresult(measurement_name = "xomtodo",
# analysis_name= self.analysis_name,
# analysis_version = self.analysis_version,
# variable_name = "_".join(self.variable_list),
# variable_value = job_filename,
# runid = r,
# container = cont,
# tag = "todo")
# todo_result.save()
# self.write_job_file(job_filename, cont, command)
# else:
# self.logger.info(f"no valid run analysis {self.analysis_name}")
# class event_rate(Analysis):
# def produce_list_of_runs(self,list_of_runs):
# # self.min_run = 52035
# exclude_tags = ["messy","bad", "ramp down", "ramp up", "ramp_down", "ramp_up", "hot_spot","abandon"]
# exclude_tags_query = [{"tags.name":{"$ne": e}} for e in exclude_tags]
# include_tags = ["_sr1_preliminary"]
# include_tags_query = [{"tags.name": i} for i in include_tags]
# coll = list(rundb.find({"$and" : exclude_tags_query,"$or": include_tags_query}))
# # coll = list(rundb.find({"$and" : exclude_tags_query}))
# valid_runs = []
# [valid_runs.append(x['number']) for x in coll]
# print ("EVENT_RATE analysis: ", valid_runs)
# valid_runs.sort()
# # valid_runs = list(filter(lambda r: r % 10 == 0, valid_runs))
# valid_runs = list(filter(lambda r: (r > self.min_run), valid_runs) )
# print(valid_runs)
# if valid_runs:
# self.logger.info("looping on valid runs")
# for cont in self.container_list:
# valid_runs_str = str(valid_runs).strip('[]')
# self.logger.info('in cont %s, appending new command for runs: %s', cont, valid_runs_str)
# for r in valid_runs:
# self.logger.info(f'producing job file for runid {r} for container {cont}')
# command = self.command.replace('[run]',str(r).zfill(6))
# job_filename = self.produce_job_filename()
# todo_result = xomlib.Xomresult(measurement_name = "xomtodo",
# analysis_name= self.analysis_name,
# analysis_version = self.analysis_version,
# variable_name = "_".join(self.variable_list),
# variable_value = job_filename,
# runid = r,
# container = cont,
# tag = "todo")
# todo_result.save()
# self.write_job_file(job_filename, cont, command)
# else:
# self.logger.info(f"no valid run analysis {self.analysis_name}")
# class photo_ionization(Analysis):
# def produce_list_of_runs(self,list_of_runs):
# # available = ("event_basics", "peak_basics"))
# exclude_tags = ["messy","bad", "ramp down", "ramp up", "ramp_down", "ramp_up", "hot_spot","abandon"]
# exclude_tags_query = [{"tags.name":{"$ne": e}} for e in exclude_tags]
# include_tags = ["_sr1_preliminary"]
# include_tags_query = [{"tags.name": i} for i in include_tags]
# coll = list(rundb.find({"$and" : exclude_tags_query,"$or": include_tags_query}))
# # coll = list(rundb.find({"$and" : exclude_tags_query}))
# valid_runs = []
# [valid_runs.append(x['number']) for x in coll]
# print ("EVENT_RATE analysis: ", valid_runs)
# valid_runs.sort()
# # valid_runs = list(filter(lambda r: r % 10 == 0, valid_runs))
# valid_runs = list(filter(lambda r: (r > self.min_run), valid_runs) )
# print(valid_runs)
# if valid_runs:
# self.logger.info("looping on valid runs")
# for cont in self.container_list:
# valid_runs_str = str(valid_runs).strip('[]')
# self.logger.info('in cont %s, appending new command for runs: %s', cont, valid_runs_str)
# for r in valid_runs:
# self.logger.info(f'producing job file for runid {r} for container {cont}')
# command = self.command.replace('[run]',str(r).zfill(6))
# job_filename = self.produce_job_filename()
# todo_result = xomlib.Xomresult(measurement_name = "xomtodo",
# analysis_name= self.analysis_name,
# analysis_version = self.analysis_version,
# variable_name = "_".join(self.variable_list),
# variable_value = job_filename,
# runid = r,
# container = cont,
# tag = "todo")
# todo_result.save()
# self.write_job_file(job_filename, cont, command)
# else:
# self.logger.info(f"no valid run analysis {self.analysis_name}")
# class test_var_1(Analysis):
# def produce_list_of_runs(self,list_of_runs):
# list_of_command = []
# run_min = 51693
# # valid_runs = list(filter(lambda r: r > run_min == 0, list_of_runs) )
# # valid_runs_dict = list(rundb.find({"number" : {"$in": valid_runs}},{'number':1,'_id':0}))
# # valid_runs_dict = list(rundb.find({"number" : {"$in": valid_runs}, "mode":"tpc_bkg"},{'number':1,'_id':0}))
# # valid_runs = [list(valid_dict.values())[0] for valid_dict in valid_runs_dict]
# # valid_runs = list(filter(lambda r: r % 25 == 0, valid_runs))
# valid_runs = list(filter(lambda r: r > run_min, list_of_runs) )
# print(valid_runs)
# if valid_runs:
# self.logger.info("looping on valid runs")
# for cont in self.container_list:
# valid_runs_str = str(valid_runs).strip('[]')
# self.logger.info('in cont %s, appending new command for runs: %s', cont, valid_runs_str)
# for r in valid_runs:
# command = self.command.replace('[run]',str(r))
# list_of_command.append(self.command.replace('[run]',str(r)) )
# utc_ts =int(time.time())
# job_filename = self.variable_name + "_" + str(utc_ts) + '.sh'
# self.result = {"var_name": job_filename, "runid":r,"type":"todo","analysis_name":self.variable_name, "container": cont, "value":1,"data":None}
# xomdbtodo.insert(self.result)
# self.write_job_file(job_filename, command)
# # influxdb_client.Point(constant.xomversion).tag("runid", str(result['runid'])).field(result['var_name'], result['value']).tag("type", "main")\
# #.tag("analyse", result['analysis_name']).tag("container",result['container'])
# else:
# print("no valid run analysis ", self.variable_name)
# return list_of_command
# class test_var_2(Analysis):
# def produce_list_of_runs(self, list_of_runs):
# list_of_command = []
# # specific dummy conditions for test_var_2: 2 consecutive Kr runs
# run_min = 40000
# # find the kr83m runs within the new runs
# coll = list(rundb.find({"number" : {"$in": list_of_runs}, "mode":"background_linked"},{'number':1}))
# valid_runs = []
# [valid_runs.append(x['number']) for x in coll]
# valid_runs.sort()
# valid_runs = list(filter(lambda r: r > run_min, valid_runs) )
# valid_run_lists = []
# run_size = len(valid_runs)
# skip = False
# for i, r in enumerate(valid_runs):
# if skip == True:
# skip = False
# continue
# if i+1 < run_size:
# if (valid_runs[i+1] == r+1):
# valid_run_lists.append([valid_runs[i],valid_runs[i+1]])
# skip = True
# for l in valid_run_lists[::10]:
# for cont in self.container_list:
# list_of_command.append(self.command.replace('[runs]', " ".join(map(str,l))) + " --container " + cont)
# if len(l) == 0:
# logging.info("no valid run this analysis")
# return list_of_command
# class ly_qp(Analysis):
# def produce_list_of_runs(self, list_of_runs):
# list_of_command = []
# exclude_tags = ["messy","bad", "nonsr0_configuration", "ramp down", "ramp up", "ramp_down", "ramp_up", "hot_spot","abandon"]
# exclude_tags_query = [{"tags.name":{"$ne": e}} for e in exclude_tags]
# include_tags = [{"$regex":"_sr0"},"lt_24h_after_kr"]
# include_tags_query = [{"tags.name": i} for i in include_tags]
# run_mode ='tpc_kr83m'
# coll = list(rundb.find({"$and" : exclude_tags_query,"$or": include_tags_query, "mode":run_mode}))
# valid_runs = []
# [valid_runs.append(x['number']) for x in coll]
# print ("QP analysis: ", valid_runs)
# valid_runs.sort()
# valid_runs = list(filter(lambda r: r % 10 == 0, valid_runs))
# if valid_runs:
# for cont in self.container_list:
# for r in valid_runs:
# list_of_command.append(self.command.replace('[run]',str(r)) + " --container " + cont)
# else:
# print("no valid run analysis ", self.variable_name)
# return list_of_command
<file_sep>from utilix import xent_collection
import pandas as pd
def get_context_hashes(envs=None):
    """Return a DataFrame of lineage hashes, one row per (context, data_type).

    Each document in the 'contexts' collection carries a mapping
    data_type -> lineage_hash; that mapping is exploded into rows which
    also keep the context name and environment tag.

    Parameters
    ----------
    envs : unused; kept for backward compatibility with existing callers.

    Returns
    -------
    pandas.DataFrame with columns including 'context', 'environment',
    'data_type' and 'lineage_hash'.
    """
    ctxs = xent_collection(collection='contexts')
    data_hashes = []
    for doc in ctxs.find():
        doc.pop('_id', None)
        hashes = doc.pop('hashes', {})
        # bug fix: the fallback label was misspelled 'UNKOWN'
        doc['context'] = doc.pop('name', 'UNKNOWN')
        doc['environment'] = doc.pop('tag', 'UNKNOWN')
        for data_type, lhash in hashes.items():
            row = dict(doc)
            row['data_type'] = data_type
            row['lineage_hash'] = lhash
            data_hashes.append(row)
    # debug print of df.columns removed
    return pd.DataFrame(data_hashes)
def iter_rundb_data():
    """Exploratory probe of the runs DB.

    Currently only prints one sample run document (run 52243, tpc
    detector).  The chunked generator below — intended to yield
    DataFrames of up to max_rows rows with one row per dataset of each
    run — is work-in-progress and commented out.
    """
    max_rows=100
    print('toto')
    coll = xent_collection()
    # earlier probe queries, kept for reference:
    # print(coll.find_one({"number":52200,"$or":[{'detector.name':'tpc'}]}))
    # print(coll.find_one({"number":52236,"$or":[{'detectors':'tpc'}, {'detectors':'muon_veto'}]}))
    print(coll.find_one({"number":52243,"$or":[{'detectors':'tpc'}]}))
    # rows = []
    # for doc in coll.find({},
    #                      projection={"number": 1,
    #                                  "mode": 1,
    #                                  "source": 1,
    #                                  "tags": 1,
    #                                  "detectors": 1,
    #                                  "_id": 0,
    #                                  "start": 1,
    #                                  "end": 1,
    #                                  "data": 1}):
    #     print('toto')
    #     run_data = dict(doc)
    #     dsets = doc.pop('data', [])
    #     start = pd.to_datetime(run_data.pop("start") )
    #     end = pd.to_datetime(run_data.pop("end"))
    #     if end is None:
    #         continue
    #     tags = run_data.pop("tags", [])
    #     detectors = run_data.pop("detectors", ["tpc"])
    #     source = run_data.pop("source", 'NA')
    #     if isinstance(source, dict):
    #         source = source.get('type', 'NA')
    #     if source is None:
    #         source = 'NA'
    #     if not isinstance(source, str):
    #         continue
    #     # row["time"] = start + 0.5*(end - start)
    #     run_data['science_run'] = get_science_run(tags)
    #     run_data['detectors'] = get_detector_string(detectors)
    #     run_data["source"] = source
    #     run_data["start"] = start
    #     run_data["end"] = end
    #     for dset in dsets:
    #         row = {
    #             "catalog": "runs_db",
    #             "location": dset.get('location', "UNKNOWN"),
    #             "data_type": dset.get('type', "UNKNOWN"),
    #             "lineage_hash": dset.get('lineage_hash', "UNKNOWN"),
    #             "size_mb": dset.get('size_mb', "UNKNOWN"),
    #             "status": dset.get('status', "UNKNOWN")
    #         }
    #         row.update(run_data)
    #         rows.append(row)
    #         if len(rows)>=max_rows:
    #             yield pd.DataFrame(rows)
    #             print(rows)
    #             rows = []
    # if len(rows):
    #     yield pd.DataFrame(rows)
if __name__ == "__main__":
    # Guard the ad-hoc probes so importing this module stays side-effect free;
    # running it as a script behaves exactly as before.
    iter_rundb_data()
    get_context_hashes()
<file_sep>from flask_login import UserMixin
from werkzeug.security import check_password_hash
from werkzeug.security import generate_password_hash
from app.extensions import db
from app.extensions import login
from app.extensions import ldap
@login.user_loader
def load_user(id):
    """Flask-Login callback: resolve a stored session id to a User row."""
    # A stale session can hand back None (or the string 'None'); map those
    # to -1 so the int() conversion below cannot raise.
    if id in (None, 'None'):
        id = -1
    print('ID leaving load_user', (id))
    return User.query.get(int(id))
class User(UserMixin, db.Model):
    """SQLAlchemy user model.

    Authentication is delegated to LDAP (try_login); the local
    password_hash is only a mirror set at first successful login.
    """
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(64), index=True, unique=True)
    password_hash = db.Column(db.String(128))
    # username = db.Column(db.String(100))
    def __init__(self, username):
        self.username = username
    def set_password(self, password):
        # Store a salted hash; the raw password is never persisted.
        self.password_hash = generate_password_hash(password)
    @staticmethod
    def try_login(username, password):
        # Bind against the LDAP server; returns the bind result
        # (truthy on success).
        res = ldap.bind_user(username,password)
        # print ('bind resuls = ', res,flush=True)
        return res
    def __repr__(self):
        return '<User {}>'.format(self.username)
<file_sep>#!/usr/bin/env python
import os
import sys
import pprint
import numpy as np
import time
import constant
import info
import datetime
from dateutil.tz import tzutc
import influxdb_client
from influxdb_client.client.write_api import SYNCHRONOUS
import logging
# Console handler shared by all Xomdb logger instances (attached to each
# instance logger in Xomdb.__init__).
ch = logging.StreamHandler(sys.stdout)
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatterch = logging.Formatter('%(name)-20s - %(levelname)-5s - %(message)s')
ch.setFormatter(formatterch)
########################
### connection to DB ###
########################
class Xomdb:
    """Access layer for XOM results, backed by InfluxDB (or MongoDB for
    plain document inserts).

    Parameters
    ----------
    type_of_db : 'influxdb' or 'mongodb'
    measurement_name : Influx measurement (e.g. 'xomtodo', 'xomdone')
        targeted by all queries/inserts of this instance.
    """
    def __init__(self, type_of_db, measurement_name):
        self.type_of_db = type_of_db
        self.client = None
        self.measurement_name = measurement_name
        self.connect()
        self.logger = logging.getLogger(self.__class__.__module__ + '.' + self.__class__.__name__)
        self.logger.debug(f"creating instance of {self.__class__}")
        # add the handlers to the logger
        self.logger.addHandler(ch)
    def connect(self):
        """Open the DB client; on failure self.client stays None."""
        if self.type_of_db == 'influxdb':
            try:
                # bug fix: assign self.client inside the try — the original
                # referenced a local `client` after the except path (NameError)
                # and printed an undefined name `database` in the handler.
                self.client = influxdb_client.InfluxDBClient(
                    url=info.url,
                    token=info.token,
                    org=info.org
                )
            except Exception:
                print('could not connect to the DB {} '.format(self.type_of_db))
    def get_last_runid(self):
        '''Return the latest runid recorded for this measurement, -1 if none.'''
        # robustness: defined even for non-influx backends
        self.last_run_id = -1
        if self.type_of_db == 'influxdb':
            query_api = self.client.query_api()
            fluxquery = 'from(bucket:"xom") |> range(start: '+ str(-constant.query_period) + 'd) |> filter(fn: (r) => r._measurement == \"' +self.measurement_name + '\")|> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value") |>group()|> max(column:"runid")'
            try:
                tables = query_api.query(fluxquery)
                self.last_run_id = tables[0].records[0]['runid']
            except Exception:
                # empty result set (or query error) -> keep the -1 sentinel
                self.last_run_id = -1
        return self.last_run_id
    def get_last_runid_from_analysis(self, analysis_name):
        '''Return the latest runid recorded for one analysis, -1 if none.'''
        # bug fix: last_run_id was unbound at return when
        # type_of_db != 'influxdb'
        last_run_id = -1
        if self.type_of_db == 'influxdb':
            query_api = self.client.query_api()
            fluxquery = 'from(bucket:"xom") |> range(start: '+ str(-constant.query_period) + 'd) |> filter(fn: (r) => r._measurement == \"' + self.measurement_name + '\") |> filter(fn: (r) => r.analysis_name == \"'+ analysis_name + '\")|> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value") |> max(column:"runid")'
            try:
                tables = query_api.query(fluxquery)
                last_run_id = tables[0].records[0]['runid']
            except Exception:
                last_run_id = -1
        return last_run_id
    def insert_record(self, record):
        '''
        Write one influx record, or a list of records written one by one.
        '''
        write_api = self.client.write_api(write_options=SYNCHRONOUS)
        if isinstance(record, list):
            for r in record:
                write_api.write(bucket="xom", org=self.client.org, record=r)
        else:
            write_api.write(bucket="xom", org=self.client.org, record=record)
    def insert(self, result):
        '''Insert one xomsaver result dict (format defined in xomlib).

        Keys used: var_name, analysis_name, value, type, container, runid,
        and data (optional list of extra values).
        '''
        if self.type_of_db == 'influxdb':
            write_api = self.client.write_api(write_options=SYNCHRONOUS)
            p = influxdb_client.Point(self.measurement_name).field('var_name',result['var_name']).field("analysis",result['analysis_name']).field('output', result['value']).tag("type", result['type']).tag("container",result['container']).field("runid", result['runid'])
            write_api.write(bucket="xom", org=self.client.org, record=p)
            if result['data']:
                outdata = []
                # bug fix: the loop iterated an undefined name `data`
                # (NameError) and a misplaced parenthesis chained .field()
                # onto the container string instead of the Point.
                for datum in result['data']:
                    # NOTE(review): datum itself is not written — each extra
                    # point repeats result['value']; confirm intent.
                    p = influxdb_client.Point(constant.xomversion).field(result['var_name'], result['value']).tag("type", "extra").tag("analyse", result['analysis_name']).tag("container", result['container']).field("runid", result['runid'])
                    outdata.append(p)
                write_api.write(bucket="xom", org=self.client.org, record=outdata)
        elif self.type_of_db == 'mongodb':
            xomdata = self.client['xom']['data']
            xomdata.insert_one(result)
    def query_all(self):
        '''Return every pivoted record of this measurement in the query period.'''
        query_api = self.client.query_api()
        fluxquery = 'from(bucket:"xom")|> range(start: '+ str(-constant.query_period) + 'd) |> filter(fn: (r) => r._measurement == \"' + self.measurement_name + '\") |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value") |>group() '
        tables = query_api.query(fluxquery)
        return tables
    def query(self, query):
        '''Run an arbitrary Flux query string and return the result tables.'''
        query_api = self.client.query_api()
        return query_api.query(query)
    def get_list(self):
        '''Same shape as query_all: pivoted, ungrouped records of this measurement.'''
        query_api = self.client.query_api()
        fluxquery = 'from(bucket:"xom") |> range(start: '+ str(-constant.query_period) + 'd) |> filter(fn: (r) => r._measurement == \"' + self.measurement_name + '\") |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value") |> group() '
        tables = query_api.query(fluxquery)
        return tables
    def get_dict_from_record(self, p):
        '''Map a pivoted influx record back to the xomsaver dict layout.'''
        return {"var_name":p['var_name'],'runid':p['runid'],'analysis_name':p['analysis_name'],"type": p['type'],'container':p['container'],'value':p['output'],'data':None}
    def delete(self):
        '''Delete every point of this measurement (epoch .. now).'''
        start = "1970-01-01T00:00:00Z"
        stop = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')
        delete_api = self.client.delete_api()
        delete_api.delete(start, stop,'_measurement=\"' + self.measurement_name + '\"', bucket='xom')
    def delete_record(self, p):
        '''Delete the single point whose _time matches record p (+/- 1 us window).'''
        d1 = datetime.timedelta(microseconds=-1)
        d2 = datetime.timedelta(microseconds=+1)
        start = p['_time'] + d1
        stop = p['_time'] + d2
        delete_api = self.client.delete_api()
        delete_api.delete(start, stop,'_measurement=\"' + self.measurement_name + '\"', bucket='xom')
<file_sep>import dash_core_components as dcc
import dash_html_components as html
import dash_table
from dash_table.Format import Format, Scheme, Sign, Symbol
import dash
import dash_core_components as dcc
import dash_html_components as html
import numpy as np
import pandas as pd
from dash.dependencies import Input, Output
# Module-level Dash app setup; runs at import time.
external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']
app = dash.Dash(__name__, external_stylesheets=external_stylesheets)
# make a sample data frame with 6 columns
np.random.seed(0)
# NOTE(review): imports placed mid-module (PEP 8 wants them at the top);
# left in place as moving them is out of scope for a doc pass.
from datetime import datetime as dt
from app.utils import getdata, getalldata
from app.utils import make_dash_table, create_plot
#df = getalldata()
# Data loaded once at import time; callbacks below close over this df.
df = getdata("el_lifetime","v3.0")
# Page layout: header links, three linked scatter graphs, and a table of
# the currently selected points.
layout = html.Div([
    # Header bar: home link (left) and logout link (right).
    html.Div([
        html.Div([
            html.A(
                html.H2("Back to home page"),
                id="home_page",
                href="https://xe1t-offlinemon.lngs.infn.it/dash/",
            ),
        ],style={'width':'30%','float':'left'}),
        html.Div([
            html.A(
                html.H2(children="logout",style={'margin-left':'50%'}),
                id="logout",
                href="https://xe1t-offlinemon.lngs.infn.it/logout/",
            ),
        ],style={'width':'30%','float':'right'}),
    ]),
    # to be fixed: the space organisation isn't very clean.
    # I found this quick fix to put the text where I want but it is not very satisfactory
    html.Br(),
    html.Br(),
    html.Br(),
    html.P(html.H1('Test data of electron lifetime')),
    # main div with the (graph + dropdowns) + fig
    # Three graphs whose selections are intersected by the callback below.
    html.Div([
        html.Div(
            dcc.Graph(id='g1', config={'displayModeBar': False}),
            className='four columns'
        ),
        html.Div(
            dcc.Graph(id='g2', config={'displayModeBar': False}),
            className='four columns'
        ),
        html.Div(
            dcc.Graph(id='g3', config={'displayModeBar': False}),
            className='four columns'
        )
    ], className='row'),
    html.Br(),
    html.Br(),
    html.Br(),
    html.P(html.H1('selected points table')),
    # Table mirroring the current selection; initialized with the full df.
    html.Div([
        dash_table.DataTable(
            id='table',
            # columns=[{"name": i, "id": i} for i in df.columns],
            data=df.to_dict('records'),
            columns=[{
                'id': 'run_number',
                'name': 'Run Number',
                'type': 'numeric'
            }, {
                'id': 'value',
                'name': u'Value',
                'type': 'numeric',
                'format': Format(precision=4),
            },
            {'id': 'error',
            'name': u'Error',
            'type': 'numeric',
            'format': Format(precision=4)
            },
            {'id': 'time',
            'name': u'Time',
            'type': 'string',
            # 'format': Format(precision=4)
            }
            # columns=[{"name": i, "id": i} for i in df[['run_number','value','error']].columns],
            ]
        )
    ],style={'width':'50%','float':'middle'}),
])
def get_figure(dfsel, x_col, y_col, selectedpoints, selectedpoints_local):
    """Build a plotly scatter (dict form) of dfsel[y_col] vs dfsel[x_col].

    selectedpoints: index values to render as selected.
    selectedpoints_local: the selection event of THIS graph; when it
    carries a 'range', that rectangle is highlighted, otherwise the full
    data extent is used as the highlight bounds.
    """
    if selectedpoints_local and selectedpoints_local['range']:
        ranges = selectedpoints_local['range']
        selection_bounds = {
            'x0': ranges['x'][0], 'x1': ranges['x'][1],
            'y0': ranges['y'][0], 'y1': ranges['y'][1],
        }
    else:
        xs = dfsel[x_col]
        ys = dfsel[y_col]
        selection_bounds = {
            'x0': np.min(xs), 'x1': np.max(xs),
            'y0': np.min(ys), 'y1': np.max(ys),
        }
    # Selected/unselected point styling follows
    # https://medium.com/@plotlygraphs/notes-from-the-latest-plotly-js-release-b035a5b43e21
    trace = {
        'x': dfsel[x_col],
        'y': dfsel[y_col],
        'text': dfsel.index,
        'textposition': 'top',
        'selectedpoints': selectedpoints,
        'customdata': dfsel.index,
        'type': 'scatter',
        'mode': 'markers+text',
        'marker': {'color': 'rgba(0, 116, 217, 0.7)', 'size': 12},
        'unselected': {
            'marker': {'opacity': 0.3},
            # make text transparent when not selected
            'textfont': {'color': 'rgba(0, 0, 0, 0)'},
        },
    }
    # Dotted rectangle highlighting the previously selected region.
    highlight = dict({
        'type': 'rect',
        'line': {'width': 1, 'dash': 'dot', 'color': 'darkgrey'},
    }, **selection_bounds)
    figure_layout = {
        'margin': {'l': 50, 'r': 0, 'b': 50, 't': 0},
        'dragmode': 'select',
        'hovermode': False,
        'xaxis': {"title": x_col, "color": "#000000"},
        'yaxis': {"title": y_col, "color": "#000000"},
        'shapes': [highlight],
    }
    return {'data': [trace], 'layout': figure_layout}
# this callback defines 3 figures
# as a function of the intersection of their 3 selections
def register_callbacks(dashapp):
    """Wire the three linked scatter plots and the selection table.

    The single callback intersects the selections of g1/g2/g3 and
    re-renders all three figures plus the table from the module-level df.
    """
    @dashapp.callback(
        [Output('g1', 'figure'),
        Output('g2', 'figure'),
        Output('g3', 'figure'),
        Output('table','data' )],
        [Input('g1', 'selectedData'),
        Input('g2', 'selectedData'),
        Input('g3', 'selectedData'),
        ]
    )
    def callback(selection1, selection2, selection3):
        # Start from the full index and intersect with each graph's
        # selected customdata (the df index values).
        selectedpoints = df.index
        for selected_data in [selection1, selection2, selection3]:
            if selected_data and selected_data['points']:
                selectedpoints = np.intersect1d(selectedpoints,
                    [p['customdata'] for p in selected_data['points']])
        # dftable = df.iloc[[selectedpoints]]
        # print ('dftable = ', dftable,flush=True)
        # dftemp = df
        # get_data(dftemp,selectedpoints,flush=True)
        # print (df.loc[df.index],flush=True)
        print (df.loc[selectedpoints],flush=True)
        return [get_figure(df, "time", "value", selectedpoints, selection1),
            get_figure(df, "time", "error", selectedpoints, selection2),
            get_figure(df, "time", "chi2", selectedpoints, selection3),
            df.loc[selectedpoints].to_dict('records')]
# @app.callback(
# Output("output-1","children"),
# [Input("save-button","n_clicks")],
# [State("table","data")]
# )
# def selected_data_to_csv(nclicks,table1):
# if nclicks == 0:
# raise PreventUpdate
# else:
# pd.DataFrame(table1).to_csv('H://R//filename.csv',index=False)
# return "Data Submitted"
<file_sep>from flask import Blueprint
from flask import redirect
from flask import render_template
from flask import request
from flask import url_for
from flask import flash
from flask_login import current_user
from flask_login import login_required
from flask_login import login_user
from flask_login import logout_user
from werkzeug.urls import url_parse
from app.extensions import db
from app.extensions import login
from app.extensions import ldap
from app.forms import LoginForm
#from app.forms import RegistrationForm
from app.models import User
# Blueprint for the classic Flask pages (index/login/logout).
server_bp = Blueprint('main', __name__)
# Blueprint hosting the Dash landing page; kept separate so it carries its
# own template/static folders.
main_bp = Blueprint('main_bp', __name__,
                    template_folder='templates',
                    static_folder='static')
@server_bp.route('/')
def index():
    """Serve the public landing page."""
    # # return render_template("login.html", title='Home Page')
    page_kwargs = {'title': 'Home Page'}
    return render_template("index.html", **page_kwargs)
@server_bp.route('/login/', methods=['GET', 'POST'])
def login():
    """Log a user in via LDAP; mirror first-time users into the local DB.

    Flow: already-authenticated users are bounced to the Dash home page;
    otherwise the submitted credentials are bound against LDAP, a local
    User row is created on first successful login, and the session opens.
    """
    if current_user.is_authenticated:
        return redirect(url_for('main_bp.home'))
    form = LoginForm()
    if form.validate_on_submit():
        # Authentication happens against LDAP, not the local password hash.
        ldapres = User.try_login(form.username.data, form.password.data)
        if not ldapres:
            flash(
                'Invalid username or password. Please try again.',
                'danger')
            error = 'Invalid username or password'
            return render_template('login.html', form=form, error=error)
        user = User.query.filter_by(username=form.username.data).first()
        if not user:
            # First successful LDAP login: mirror the account locally so
            # Flask-Login has a row to reference.
            #user = User(username, password)
            user = User(username=form.username.data)
            user.set_password(form.password.data)
            db.session.add(user)
            db.session.commit()
        login_user(user)
        flash('You have successfully logged in.', 'success')
        next_page = request.args.get('next')
        # Only honor same-site ?next= targets (open-redirect guard).
        if not next_page or url_parse(next_page).netloc != '':
            next_page = url_for('main.index')
        # next_page = url_for('main_bp.home')
        return redirect(next_page)
    return render_template('login.html', title='Sign In', form=form)
@server_bp.route('/logout/')
@login_required
def logout():
    """End the current session and send the user back to the index page."""
    logout_user()
    target = url_for('main.index')
    return redirect(target)
# @server_bp.route('/register/', methods=['GET', 'POST'])
# def register():
# if current_user.is_authenticated:
# return redirect(url_for('main.index'))
# form = RegistrationForm()
# if form.validate_on_submit():
# user = User(username=form.username.data)
# user.set_password(form.password.data)
# db.session.add(user)
# db.session.commit()
# return redirect(url_for('main.login'))
# return render_template('register.html', title='Register', form=form)
@main_bp.route('/dash/')
@login_required
def home():
    """Landing page (login required)."""
    page_kwargs = dict(title='home page', body="under construction")
    return render_template('home.html', **page_kwargs)
<file_sep>from datetime import datetime as dt
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input
from dash.dependencies import Output
from app.utils import getdata, getalldata, getvariables
from app.utils import make_dash_table, create_plot, create_plot_with_runid, create_plot_errorx
import os
import base64
# Module-level data load for the quick-view page; runs at import time.
df = getalldata()
# Defaults shown before the user interacts with any control.
initvar = 'lightyield'
default_strax = '2.1.1'
default_straxen = '1.2.3'
# Rows of df matching the default variable and software versions.
dftemp = df.loc[ ( df['variable_name']==initvar ) & (df['strax_version']==default_strax) & ( df['straxen_version']==default_straxen) ]
# Variable metadata (legend labels and units), keyed by variable_name.
dfvar = getvariables()
dfvartemp = dfvar.loc[ (dfvar['variable_name']==initvar) ]
print('tes ======== ', dfvartemp)
process_dict = {var:leg for (var, leg) in (zip(dfvar['variable_name'], dfvar['legend_name']) )}
unit_dict = {var:unit for (var, unit) in (zip(dfvar['variable_name'], dfvar['unit']) )}
print('process_dict = ' , process_dict)
# Initial figures rendered before any callback fires.
# NOTE(review): x/y/error come from the filtered dftemp but figname is taken
# from the FULL df — lengths may disagree; likely should be
# dftemp["figname"]. Confirm against create_plot before changing.
FIGURE = create_plot(
    x=dftemp["timestamp"].astype('int'),
    xlabel='time',
    y=dftemp["value"],
    ylabel=dfvartemp['legend_name'][0],
    error=dftemp["error"],
    figname=df["figname"]
)
# Same data, with run ids available on the x axis hover.
FIGURE_WITH_RUNID = create_plot_with_runid(
    x=dftemp["timestamp"],
    xrunid=dftemp["run_id"],
    xlabel='Time Stamp',
    y=dftemp["value"],
    ylabel=dfvartemp['legend_name'][0],
    yunit=dfvartemp['unit'][0],
    error=dftemp["error"],
    figname=df["figname"]
)
# Absolute paths to the branding assets served by the frontend.
image_filename = '/home/xom/xom/frontend/app/assets/logo_xenon.png'
image_svg = '/home/xom/xom/frontend/app/assets/xenonlogo.svg'
logo = '/home/xom/xom/frontend/app/assets/xenonlogo.png'
def b64_image(image_filename):
    """Return the file's bytes as a base64-encoded PNG data URI."""
    with open(image_filename, 'rb') as fh:
        payload = base64.b64encode(fh.read()).decode('utf-8')
    return 'data:image/png;base64,' + payload
# Module-load-time base64 of the SVG logo.
# NOTE(review): the file handle is opened without being closed, and this
# value is only referenced from commented-out layout code — confirm it is
# still needed.
encoded_image = base64.b64encode(open(image_svg, 'rb').read()).decode()
def b64_imagesvg(image_filename):
    """Return the file's bytes as a base64-encoded SVG data URI.

    Fix: the registered SVG media type is 'image/svg+xml'; the original
    'image/svg' is not a valid MIME type, so browsers may refuse to render
    the resulting data URI.
    """
    with open(image_filename, 'rb') as f:
        image = f.read()
    return 'data:image/svg+xml;base64,' + base64.b64encode(image).decode('utf-8')
# Dropdown options: label = human-readable legend, value = variable name.
variable_option = [{'label':leg, 'value':var} for leg, var in zip(dfvar['legend_name'], dfvar['variable_name'])]
# Quick-view page layout: navbar with logo + title, page heading, and a
# large block of disabled (commented-out) graph/dropdown markup kept for
# future re-enabling.
layout = html.Div(className="body",children=[
    # Navigation bar with the Xenon logo linking back to the main site.
    html.Div( className='navbar' ,children=[
        html.Div( className='container' , children=[
            html.Div( className='logodiv', children=[
                # html.A("Link to external site",[ html.Img(className='logoim', src=b64_image(logo)), href='https://xe1t-offlinemon.lngs.infn.it/'])
                html.A(html.Img(className='logoim', src=b64_image(logo)), href='https://xe1t-offlinemon.lngs.infn.it/'),
                html.A([html.Span("X"), "enon ",html.Span("O"),"ffline ",html.Span("M"),"onitoring" ], href='https://xe1t-offlinemon.lngs.infn.it/',style={'position':'absolute' ,'margin-left':'1.9em','font-size':22}),
            ]), #logodiv
        ]) #container
    ]), #navbar
    html.P(html.H2('Quick View'),style={'text-align': 'center'}),
    # # to be fixed: the space organisation isn't very clean.
    # # I found this quick fix to put the text where I want but it is not very satisfactory
    # html.Br(),
    # html.Br(),
    # html.Br(),
    # html.P(html.H1('Test data')),
    # # main div with the (graph + dropdowns) + fig
    # html.Div([
    #     # div (graph + dropdowns)
    #     html.Div([
    #         # dropdown div for process
    #         html.Div([
    #             dcc.Dropdown(
    #                 id='my-dropdown',
    #                 options=[
    #                     {'label': 'Electron Lifetime', 'value': 'el_lifetime'},
    #                     {'label': 'Charge Yield', 'value': 'charge_yield'},
    #                     {'label': 'Light Yield', 'value': 'light_yield'}
    #                 ],
    #                 value='el_lifetime',
    #                 clearable=False
    #             ),
    #         ],style={'width': '48%', 'display': 'inline-block'}),
    #         # end dropdown div process
    #         # dropdown div version
    #         html.Div([
    #             dcc.Dropdown(
    #                 id='version-dropdown',
    #                 options=[
    #                     {'label': 'version 1.0', 'value': 'v1.0'},
    #                     {'label': 'version 2.0', 'value': 'v2.0'},
    #                     {'label': 'version 3.0', 'value': 'v3.0'},
    #                     {'label': 'version 4.0', 'value': 'v4.0'},
    #                 ],
    #                 value='v4.0',
    #                 clearable=False
    #             ),
    #         ],style={'width': '48%', 'float': 'right', 'display': 'inline-block'}),
    #         # end dropdown div version
    #         html.Div([
    #             dcc.Graph(id='my-graph',
    #                       hoverData={"points": [{"pointNumber":0}]},
    #                       figure=FIGURE)
    #         ],style={'width': '90%', 'display': 'inline-block'}),
    #     ],style={'width': '60%', 'display': 'inline-block'}),
    #     # end div (graph+ dropdown)
    #     # div hover plot
    #     html.Div([
    #         html.P(html.H2('matching graph'),style={'float':'right'}),
    #         html.Img(id='embedded_plot',
    #                  src=''.format(encoded_image_fix.decode()),style={'width': '100%', 'display': 'inline-block','height':'auto','vertical-align':'middle'})
    #     ],style={'width': '30%' ,'display': 'inline-block','vertical-align':'top'}),
    #     # end div hover plot
    # ])
    # # end (graph+ dropdown) + hover plot
])
# NOTE(review): this overwrites the process_dict built from dfvar earlier in
# the module with a hard-coded mapping — confirm which one callbacks should
# actually use before removing either.
process_dict = { 'el_lifetime':'Electron Lifetime [us]','charge_yield': 'Charge Yield [p.e./keV]','light_yield':'Light Yield [p.e./keV]'}
def register_callbacks(dashapp):
    """Placeholder: the quick-view callbacks are currently disabled.

    Both inner functions are defined but their @dashapp.callback
    decorators are commented out, so they are never registered; calling
    this function only defines (and discards) them.
    """
    #callback for the main plot
    # @dashapp.callback(Output('my-graph', 'figure'), [Input('my-dropdown', 'value'), Input('version-dropdown', 'value')])
    def update_graph(selected_dropdown_value, version_value):
        # Stub body; the intended implementation is kept below for reference.
        print('toto')
        # dftemp = df.loc[(df['version'] == version_value) & (df['process'] == selected_dropdown_value)]
        # return create_plot(
        #     x=dftemp["time"],
        #     xlabel='time',
        #     y=dftemp["value"],
        #     ylabel=process_dict[selected_dropdown_value],
        #     error=dftemp['error'],
        #     figname=dftemp['figure']
        # )
    # talke the hover data and the two dropdowns as input to update the graph
    # @dashapp.callback(Output("embedded_plot", "src"),[Input("my-graph", "hoverData"),Input('my-dropdown', 'value'), Input('version-dropdown', 'value')])
    def picture_on_hover(hoverData,process,version):
        """
        params hoverData: data on graph hover, and dropdowns
        update the graph as the users passes the mouse on a point or the user changes the drop down values.
        """
        # Stub body; the intended implementation is kept below for reference.
        print('tata')
        # if hoverData is None:
        #     raise PreventUpdate
        # try:
        #     dftemp = df.loc[(df['version'] == version) & (df['process'] == process)]
        #     figtemp = create_plot(
        #         x=dftemp["timestamp"],
        #         xlabel='time',
        #         y=dftemp["value"],
        #         ylabel=process,
        #         error=dftemp['error'],
        #         figname=dftemp['finame']
        #     )
        #     # gets the index the point on which the mouse is on
        #     point_number = hoverData["points"][0]["pointNumber"]
        #     # gets the corresponding figure name
        #     figname = str(figtemp["data"][0]["text"].values[point_number]).strip()
        #     image_path = figname
        #     encoded_image = base64.b64encode(open(image_path, 'rb').read())
        #     # the way I found to print out the figure...
        #     return 'data:image/png;base64,{}'.format(encoded_image.decode())
        # except Exception as error:
        #     print(error)
        #     raise PreventUpdate
<file_sep>#libraries
from utilix.config import Config
from argparse import ArgumentParser
import json
import cutax
import strax
import straxen
import matplotlib.pyplot as plt
import numpy as np
import sys
import pandas as pd
sys.path +=['../utils/']
import xomlib as xl
import constant as cst
#function to save data
def SaveData(result,filename):
    """Serialize *result* (a JSON-compatible object) to *filename*.

    :param result: dict (or any JSON-serializable object) to write.
    :param filename: destination path; overwritten if it exists.

    Fix: the original called ``f.close()`` inside the ``with`` block — the
    context manager already closes the file, so the explicit close was
    redundant and has been removed.
    """
    with open(filename,'w') as f:
        json.dump(result,f)
#graph typography
#graph typography
# Matplotlib ``fontdict`` shared by every title/label call in MyAnalysis below.
font_large = {'family': 'serif',
        'color': 'darkred',
        'weight': 'normal',
        'size': 24,
        }
#function to calculate calibration parameters (Kr83m)
def MyAnalysis(runnumber, container):
    """Compute Kr83m light-yield calibration values for one run and upload them.

    Selects the "double S1" (separate 32.2 keV and 9.4 keV gammas) and
    "single S1" (merged 41.6 keV) Kr83m populations with quality/standard
    cuts, computes the light yield (mean S1 area / gamma energy) and its
    statistical error for each, saves a control histogram per value, writes
    the three results as JSON and pushes everything to the XOM database.

    :param runnumber: run id string (zero-padded by the caller).
    :param container: container name string recorded with each result.
    :returns: 0 on completion.

    Bug fix: ``DLYS1`` (error of the 41.6 keV light yield) was previously set
    to the raw standard error of the mean S1 area; it is now divided by the
    gamma energy ``ES1``, consistent with ``DLYS1a`` and ``DLYS1b``.
    """
    print("Begin of My Analysis")
    #access to xenonnt data
    st = cutax.contexts.xenonnt_v7()
    #charging the run
    data = st.get_df(runnumber, targets = ('event_info_double'), progress_bar=False)
    #timestamp
    time = data['time'].values
    #using mean to avoid edge effects
    timestamp = np.mean(time)
    del time
    #strax version (currently unused, kept for future bookkeeping)
    straxversion = strax.__version__
    #straxen version (currently unused, kept for future bookkeeping)
    straxenversion = straxen.__version__
    #for quality cuts
    def line(x,a,b):
        # simple affine function a*x + b used by the AFT band cuts
        return a*x+b
    def diffusion_model(t,w_SE, w_t0, t_0):
        # expected S2 width vs drift time from the diffusion model
        return np.sqrt(w_SE**2 + ((w_t0 - w_SE)**2 /t_0) * t)
    w_SE = 599.70428e-3
    w_t0 = 400.29572e-3
    t_0 = 1.0029191e-3
    #gamma photon energies
    ES1a = 32.2
    ES1b = 9.4
    ES1 = ES1a + ES1b
    #rsquare cut (for both populations)
    datacor = data[((data['s2_a_x_mlp']**2)+(data['s2_a_y_mlp']**2))<=3100]
    #DOUBLE S1 population
    #cut large box (arbitrary)
    xmin = 100
    xmax = 700
    ymin = 30
    ymax = 250
    bigbox = datacor[(datacor['s1_a_area']<= xmax) & (datacor['s1_a_area']>= xmin) & (datacor['s1_b_area']<= ymax) & (datacor['s1_b_area']>= ymin)]
    s1_a_area = bigbox['s1_a_area'].values
    s1_b_area = bigbox['s1_b_area'].values
    #litlle box (automatic): re-center a +/- sigma*std window on the population mean
    #mean
    xmoy=np.mean(s1_a_area)
    ymoy=np.mean(s1_b_area)
    #standard deviation
    sigmax=np.std(s1_a_area)
    sigmay=np.std(s1_b_area)
    sigma=2.5 #arbitrary but works well
    #new definiton of the box
    xmin=xmoy-sigma*sigmax
    xmax=xmoy+sigma*sigmax
    ymin=ymoy-sigma*sigmay
    ymax=ymoy+sigma*sigmay
    #cut box
    datacut = datacor[(datacor['s1_a_area']<= xmax) & (datacor['s1_a_area']>= xmin) & (datacor['s1_b_area']<= ymax) & (datacor['s1_b_area']>= ymin)]
    #removing useless datas
    del bigbox, s1_a_area, s1_b_area
    #quality cuts: S1 area-fraction-top band, then S2 width vs diffusion model band
    dataline = datacut[datacut['s1_a_area_fraction_top'] < line(datacut['drift_time'],-2.3e-7,0.70)]
    dataline = dataline[dataline['s1_a_area_fraction_top'] > line(dataline['drift_time'],-2e-7,0.40)]
    d1 = dataline[dataline['s2_a_range_50p_area']/diffusion_model(dataline['drift_time'], w_SE, w_t0, t_0) > (-30/(dataline['drift_time']*1e-3+10)+0.8)]
    d2 = d1[d1['s2_a_range_50p_area']/diffusion_model(d1['drift_time'], w_SE, w_t0, t_0) < (30/(d1['drift_time']*1e-3+10)+1.2)]
    qualitycut = d2[(d2['drift_time']*1e-3) < 2400]
    #removing useless datas
    del dataline, d1, d2, datacut
    #standard cuts for DOUBLE S1 population
    allcutquality = qualitycut[(qualitycut['ds_s1_dt']>750) & (qualitycut['ds_s1_dt'] <2000) & (qualitycut['s1_a_n_channels'] >= 80) & (qualitycut['s1_a_n_channels'] < 225) & (qualitycut['s1_b_n_channels'] >= 25) & (qualitycut['s1_b_n_channels'] < 125)]
    #acceptance of DOUBLE S1 population (percent of all events surviving the cuts)
    acceptanceDoubleS1 = (len(allcutquality)/len(data))*100
    #calcul of parameters for Double S1 population
    #Light Yield
    S1_a = allcutquality['s1_a_area'].values
    S1_b = allcutquality['s1_b_area'].values
    #mean
    S1amoy = np.mean(S1_a)
    S1bmoy = np.mean(S1_b)
    #standard deviation
    S1asigma = np.std(S1_a)
    S1bsigma = np.std(S1_b)
    #standard error of mean
    errorS1amoy = S1asigma/np.sqrt(len(S1_a))
    errorS1bmoy = S1bsigma/np.sqrt(len(S1_b))
    #Light Yield = mean S1 area / gamma energy
    LYS1a = S1amoy/ES1a
    LYS1b = S1bmoy/ES1b
    #Light Yield error
    DLYS1a = errorS1amoy/ES1a
    DLYS1b = errorS1bmoy/ES1b
    #graphe LY 32.2 keV
    fig1 = plt.figure(figsize=(12,8))
    ax = plt.subplot(111)
    # NOTE(review): `args` is built but never used — presumably leftover from
    # a colorbar call; confirm before deleting.
    args = dict(orientation = "horizontal", pad = 0.2, aspect = 50,
                fraction=0.046)
    h1 = plt.hist(S1_a, bins = 50, color = 'blue',
                  histtype = 'step', linestyle = 'solid');
    y = np.max(h1[0])
    x = np.max(h1[1])
    ax.set_title('DoubleS1 LY 32.2 keV',fontdict=font_large)
    ax.set_xlabel('S1_a [PE]',fontdict=font_large)
    ax.set_ylabel('events',fontdict=font_large)
    ax.text(6.5*(x/10),6.5*(y/8),r'LY = {:0.2f} $\pm$ {:0.2f} $PE\cdot$$keV^{{-1}}$'.format(LYS1a,DLYS1a),color='red',fontsize='23')
    ax.text(7*(x/10),5.5*(y/8),r'acceptance= {:0.2f} %'.format(acceptanceDoubleS1) ,color='red',fontsize='23')
    #graphe LY 9.4 keV
    fig2 = plt.figure(figsize=(12,8))
    ax1 = plt.subplot(111)
    args = dict(orientation = "horizontal", pad = 0.2, aspect = 50,
                fraction=0.046)
    h2 = plt.hist(S1_b, bins = 50,color = 'blue',
                  histtype = 'step', linestyle = 'solid');
    y = np.max(h2[0])
    x = np.max(h2[1])
    ax1.set_title('DoubleS1 LY 9.4 keV',fontdict=font_large)
    ax1.set_xlabel('S1_b [PE]',fontdict=font_large)
    ax1.set_ylabel('events',fontdict=font_large)
    ax1.text(6.2*(x/10),6.5*(y/8),r'LY = {:0.2f} $\pm$ {:0.2f} $PE\cdot$$keV^{{-1}}$'.format(LYS1b,DLYS1b),color='red',fontsize='23')
    ax1.text(6.8*(x/10),5.5*(y/8),r'acceptance= {:0.2f} %'.format(acceptanceDoubleS1) ,color='red',fontsize='23')
    #removing useless datas
    del allcutquality, qualitycut, S1_a, S1_b
    #results for DoubleS1 population
    #results = []
    # Output 1 (Light Yield 32.2 keV)
    result1 = {}
    result1['run_id'] = int(runnumber)
    result1['run_ids'] = [int(runnumber)]
    result1['variable_name'] = 'LYS1a'
    result1['container'] = container
    result1['timestamp'] = int(timestamp/1e9)
    result1['value'] = LYS1a
    result1['error'] = DLYS1a
    result1['chisquared'] = None
    result1['tag'] = 'test'
    outfname1 = result1['variable_name']+'_'+str(result1['run_id']) + '_' + 'cont_' + result1['container']
    outjsonname1 = outfname1+'.json'
    outfigname1 = outfname1 + ".png"
    result1['figname'] = outfigname1
    # save the figure
    figpath1 = './algorithms/lycy_qp/tmp/' + outfigname1
    fig1.savefig(figpath1)
    # Output 2 (Light Yield 9.4 keV)
    result2 = {}
    result2['run_id'] = int(runnumber)
    result2['run_ids'] = [int(runnumber)]
    result2['variable_name'] = 'LYS1b'
    result2['container'] = container
    result2['timestamp'] = int(timestamp/1e9)
    result2['value'] = LYS1b
    result2['error'] = DLYS1b
    result2['chisquared'] = None
    result2['tag'] = 'test'
    outfname2 = result2['variable_name']+'_'+str(result2['run_id']) + '_' + 'cont_' + result2['container']
    outjsonname2 = outfname2+'.json'
    outfigname2 = outfname2 + ".png"
    result2['figname'] = outfigname2
    # save the figure
    figpath2 = './algorithms/lycy_qp/tmp/' + outfigname2
    fig2.savefig(figpath2)
    #SINGLE S1 population
    #quality cuts (same AFT band / S2-width band / drift-time cuts as above)
    p1 = datacor[datacor['s1_a_area_fraction_top'] < line(datacor['drift_time'],-2.3e-7,0.70)]
    print(len(p1.values))
    p2 = p1[p1['s1_a_area_fraction_top'] > line(p1['drift_time'],-2e-7,0.40)]
    p3 = p2[p2['s2_a_range_50p_area']/diffusion_model(p2['drift_time'], w_SE, w_t0, t_0) > (-30/(p2['drift_time']*1e-3+10)+0.8)]
    p4 = p3[p3['s2_a_range_50p_area']/diffusion_model(p3['drift_time'], w_SE, w_t0, t_0) < (30/(p3['drift_time']*1e-3+10)+1.2)]
    qualitycutS1 = p4[(p4['drift_time']*1e-3) < 2400]
    print(len(qualitycutS1.values))
    #removing useless datas
    del p1, p2, p3, p4
    #standard cuts
    p5 = qualitycutS1[(qualitycutS1['s1_a_n_channels'] >= 90) & (qualitycutS1['s1_a_n_channels'] < 225) & (qualitycutS1['s1_a_range_50p_area']>=60) & (qualitycutS1['s1_a_range_50p_area']<1000) & (qualitycutS1['s1_a_area_fraction_top']<0.68)]
    #(qualitycutS1['ds_s1_dt'] ==0) &
    # NOTE(review): line() is defined as line(x, a, b) = a*x + b, so
    # line(0.55, 15, s1_a_area) evaluates to 15*0.55 + area — the arguments
    # look swapped relative to an intended line(area, 0.55, 15); confirm with
    # the analysis author before changing.
    allcutS1 = p5[(line(0.55,15,p5['s1_a_area']) > p5['s1_a_n_channels'])]
    del p5, qualitycutS1
    #SINGLES1allcut
    S1 = allcutS1['s1_a_area']
    #acceptance of Single S1 population
    acceptanceSingleS1 = (len(allcutS1)/len(data))*100
    #mean
    S1moy = np.mean(S1)
    #standard deviation
    S1sigma = np.std(S1)
    #standard error of mean
    errorS1moy = S1sigma/np.sqrt(len(S1))
    #Light Yield
    LYS1 = S1moy/ES1
    #Light Yield error (fixed: divide by the gamma energy, like DLYS1a/DLYS1b)
    DLYS1 = errorS1moy/ES1
    #graphe LY 41.6 keV
    fig3 = plt.figure(figsize=(12,8))
    ax2 = plt.subplot(111)
    args = dict(orientation = "horizontal", pad = 0.2, aspect = 50,
                fraction=0.046)
    h3 = plt.hist(S1, bins = 100, color = 'blue',
                  histtype = 'step', linestyle = 'solid', range = [180,720]);
    y = np.max(h3[0])
    x = np.max(h3[1])
    ax2.set_title('SingleS1 LY 41.6 keV',fontdict=font_large)
    ax2.set_xlabel('S1 [PE]',fontdict=font_large)
    ax2.set_ylabel('events',fontdict=font_large)
    ax2.text(6*(x/10),6.5*(y/8),r'LY = {:0.2f} $\pm$ {:0.2f} $PE\cdot$$keV^{{-1}}$'.format(LYS1,DLYS1),color='red',fontsize='23')
    ax2.text(6.5*(x/10),5.5*(y/8),r'acceptance= {:0.2f} %'.format(acceptanceSingleS1) ,color='red',fontsize='23')
    # Output 3 (Light Yield 41.6 keV)
    result = {}
    result['run_id'] = int(runnumber)
    result['run_ids'] = [int(runnumber)]
    result['variable_name'] = 'LYS1'
    result['container'] = container
    result['timestamp'] = int(timestamp/1e9)
    result['value'] = LYS1
    result['error'] = DLYS1
    result['chisquared'] = None
    result['tag'] = 'test'
    result['data'] = None
    outfname = result['variable_name']+'_'+str(result['run_id']) + '_' + 'cont_' + result['container']
    outjsonname = outfname+'.json'
    outfigname = outfname + ".png"
    result['figname'] = outfigname
    # save the figure
    figpath = './algorithms/lycy_qp/tmp/' + outfigname
    fig3.savefig(figpath)
    # write the json file:
    xl.SaveData(result,'./algorithms/lycy_qp/tmp/' + outjsonname)
    xl.SaveData(result1,'./algorithms/lycy_qp/tmp/' + outjsonname1)
    xl.SaveData(result2,'./algorithms/lycy_qp/tmp/' + outjsonname2)
    # write on the XOM data base at LNGS
    xl.UploadDataDict(result, 'dali')
    xl.UploadDataDict(result1, 'dali')
    xl.UploadDataDict(result2, 'dali')
    xl.UploadFile(figpath, '<EMAIL>:'+ cst.figfolder)
    xl.UploadFile(figpath1, '<EMAIL>:'+ cst.figfolder)
    xl.UploadFile(figpath2, '<EMAIL>:'+ cst.figfolder)
    del datacor, data, S1, allcutS1
    # #close all figures
    # #plt.close('all')
    # #saving datas
    # SaveData(results,"result.json")
    print("end of My Analysis")
    return 0
def main():
    """Parse the run number (and optional container name) and run MyAnalysis."""
    arg_parser = ArgumentParser("MyAnalysis")
    config = Config()
    arg_parser.add_argument("number", help="Run number to process")
    arg_parser.add_argument("--container", type=str, help="container information")
    parsed = arg_parser.parse_args()
    # Zero-pad the run number to the canonical six-digit form.
    parsed.number = '%06d' % int(parsed.number)
    MyAnalysis(parsed.number, parsed.container)
if __name__ == "__main__":
main()<file_sep>#needed for xom_saver:
import os
from doctest import run_docstring_examples
import sys
import json
#from bson.json_util import dumps
import constant
import subprocess
import dblib as dbl
import influxdb_client
import numpy as np
## figure folder on the machine that host the database and display (grafana)
#display_fig_folder = "/home/xom/data/v1.1/test/."
import logging
# Shared stdout handler, attached to every Xomresult logger below.
ch = logging.StreamHandler(sys.stdout)
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatterch = logging.Formatter('%(name)-20s - %(levelname)-5s - %(message)s')
ch.setFormatter(formatterch)
# Measurement name (from the project-level constant module) and the sentinel
# strings printed for the job wrapper to detect success/failure.
measurement_data_name = constant.measurement_name
success_message = "SUCCESSWITHXOM"
error_message = "ERRORWITHXOM"
class Xomresult:
    """One XOM analysis result and its persistence logic.

    Holds the metadata of a single measured variable and knows how to turn
    itself into influxdb records, query itself back and save itself into the
    database.

    Constructor parameters (previously documented in a stray class-body
    string, now a proper docstring):
        analysis_name: str
        variable_name: str
        analysis_version: str
        variable_value: float
        runid: int
        container : str (name of the container); falls back to
            $SINGULARITY_NAME when None
        runids : list of runids
        timestamp : int
        data format is a dictionnary {"key":value, }
        figure_path : str
        tag: analysis specific tag

    Fix: ``set_result_dict`` now returns the dict it builds. The original
    returned ``None``, so ``self.result_dict = self.set_result_dict()`` in
    ``__init__`` immediately clobbered the freshly built dict back to None
    (it was only repaired later by ``get_result_records``).
    """
    def __init__(self,
                 analysis_name,
                 analysis_version,
                 variable_name,
                 variable_value,
                 runid,
                 measurement_name = measurement_data_name,
                 container=None,
                 # NOTE(review): mutable default — harmless here because the
                 # class never mutates it, but worth confirming no caller does.
                 runids = [""],
                 timestamp = 0,
                 data=None,
                 figure_path="",
                 tag = ""
                 ):
        self.measurement_name = measurement_name
        self.analysis_name = analysis_name
        self.analysis_version = analysis_version
        self.variable_name = variable_name
        self.variable_value = variable_value
        self.runid = runid
        if container is None:
            # default to the singularity container this job runs in
            self.container = os.getenv('SINGULARITY_NAME')
        else:
            self.container = container
        self.runids = runids
        self.timestamp = timestamp
        self.data = data
        self.figure_path = figure_path
        self.tag = tag
        self.result_dict = self.set_result_dict()
        self.logger = logging.getLogger(self.__class__.__module__ + '.' + self.__class__.__name__)
        # add the handlers to the logger
        self.logger.addHandler(ch)
    def set_result_dict(self):
        """Build, store and return the flat result dictionary."""
        self.result_dict = {"measurement_name":self.measurement_name, "analysis_name": self.analysis_name, "analysis_version": self.analysis_version, "variable_name": self.variable_name, "variable_value": self.variable_value, "runid":self.runid, "container": self.container, "runids":self.runids, "timestamp": self.timestamp, "data":self.data, "figure_path":self.figure_path, "tag":self.tag}
        return self.result_dict
    def set_result_df(self):
        """Build, store and return the dataframe-style result dictionary."""
        self.result_df = {"measurement_name":self.measurement_name,
                          "analysis_name": self.analysis_name,
                          "analysis_version": self.analysis_version,
                          "variable_name": self.variable_name,
                          "variable_value": self.variable_value,
                          "runid":self.runid,
                          "container": self.container,
                          "runids":self.runids,
                          "timestamp": self.timestamp,
                          "data":self.data,
                          "figure_path":self.figure_path,
                          "tag":self.tag}
        return self.result_df
    def get_result_records(self):
        """Convert the result into a list of influxdb Points.

        Always emits one "main" point; when ``data`` is a dict, also emits one
        "extra" point per key (or per element for numpy-array values).
        """
        record_list = []
        if self.result_dict is None:
            self.set_result_dict()
        result = self.result_dict
        runids = "_".join(self.runids)
        pmain = influxdb_client.Point(self.measurement_name).field(result['variable_name'], result['variable_value']).tag("datatype", "main").tag("analysis_name", result['analysis_name']).tag("analysis_version", result['analysis_version']).tag("variable_name",result['variable_name']).field("runid", result['runid']).tag("container",result['container']).field("runids",runids).field("timestamp",result['timestamp']).field("figure_path",result['figure_path']).tag("tag",result['tag'])
        record_list.append(pmain)
        if isinstance(self.result_dict['data'], dict):
            for key, val in self.result_dict['data'].items():
                if isinstance(val, np.ndarray):
                    # one point per array element, all carrying the same key
                    for v in val:
                        p = influxdb_client.Point(self.measurement_name).field(result['variable_name'], result['variable_value']).tag("datatype", "extra").tag("analysis_name", result['analysis_name']).tag("analysis_version", result['analysis_version']).tag("variable_name",result['variable_name']).field("runid", result['runid']).tag("container",result['container']).field("runids",runids).field("timestamp",result['timestamp']).field("figure_path",result['figure_path']).tag("tag",result['tag']).field(key,v)
                        record_list.append(p)
                else:
                    p = influxdb_client.Point(self.measurement_name).field(result['variable_name'], result['variable_value']).tag("datatype", "extra").tag("analysis_name", result['analysis_name']).tag("analysis_version", result['analysis_version']).tag("variable_name",result['variable_name']).field("runid", result['runid']).tag("container",result['container']).field("runids",runids).field("timestamp",result['timestamp']).field("figure_path",result['figure_path']).tag("tag",result['tag']).field(key,val)
                    record_list.append(p)
        return record_list
    def query_from_result(self):
        """Query the database for this result and print XOM_OK / XOM_ERROR."""
        query = 'from(bucket:"xom")|> range(start: '+ str(-constant.query_period) + 'd) |> filter(fn: (r) => r._measurement == \"' + self.measurement_name + '\") |> filter(fn: (r) => r.analysis_name == \"' + self.analysis_name + '\") |> filter(fn: (r) => r.variable_name == \"' + self.variable_name + '\") |> filter(fn: (r) => r.analysis_version == \"' + self.analysis_version + '\") |> filter(fn: (r) => r.runid == \"' + self.runid + '\") '
        if dbl.query(query) is None:
            print("XOM_ERROR")
        else:
            print("XOM_OK")
    def save_in_db(self, record):
        """Insert *record* into the influxdb measurement."""
        ## connect to the database:
        client = dbl.Xomdb('influxdb',self.measurement_name)
        client.insert_record(record)
    # def test_if_written(self, record):
    def save(self):
        """Convert this result to records and write them to the database."""
        record = self.get_result_records()
        self.save_in_db(record)
    def xom_message(self,success):
        """Print the success/error sentinel scanned for by the job wrapper."""
        # NOTE(review): only exact True / False trigger output; other values
        # print nothing — preserved as-is.
        if success == True:
            print(success_message)
        if success == False:
            print(error_message)
# def xom_saver(
# analysis_name, var_name, runid, var_value,
# runids = None,
# timestamp = None,
# data=None,
# figure=None,
# datatype = None,
# tag = None,
# save_folder = None,
# db = 'influxdb'
# ):
# """ simple function to save results in xom fomat
# :param analysis_name: name of the variable of the analysis
# :param runids: optional array for several run ids
# :param var_name: name of the variable of the analysis
# :param var_value: variable you want to display
# :param timestamp:
# :param data: additionnal array of data (can be either [val, error, chi2] , can be [x1, x2, x3 ..., x10] bin content etc)
# :param figure: figure object one want to display in the xom display
# :param tag: analyse specific tag
# :param save_folder: folder to save the json data in
# """
# # server_address = constant.server_address
# if save_folder == None:
# save_folder = constant.output_folder + './tmp/'
# container = os.getenv('SINGULARITY_NAME')
# result = {}
# result['analysis_name'] = analysis_name
# if timestamp:
# result['timestamp'] = int(timestamp/1e9)
# result['runid'] = int(runid)
# result['var_name'] = var_name
# result['container'] = container
# result['value'] = var_value
# result['tag'] = tag
# result['type'] = datatype
# result['data'] = data
# ## standard json filename to be written in case the database connections fails
# outfname = result['analysis_name']+ "_" + result['var_name']+'_'+str(result['runid']) + '_' + result['container']
# outjsonname = outfname +'.json'
# ## save the JSON file on disk
# filename = save_folder + outjsonname
# with open(filename,'w') as f:
# json.dump(result,f)
# f.close()
# ## save the figure
# if figure:
# figname = 'fig_' + result['var_name']+'_'+str(result['runid']) + '_' + 'cont_' + result['container'] + '_' + tag +'.png'
# figpath = save_folder + figname
# figure.savefig(figpath)
# #check if written
# return True
# def xom_message(sucess):
# if sucess == True:
# print("PROCESSEDWITHXOM")
# if sucess == False:
# print("ERRWITHXOM")
# # # copy fig somewhere, for the moment at LNGS
# # process = subprocess.Popen(['scp', figpath, '<EMAIL>:'+ display_fig_folder],
# # stdout=subprocess.PIPE,
# # universal_newlines=True)
<file_sep>from datetime import datetime as dt
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input
from dash.dependencies import Output
from app.utils import getdata, getalldata, getvariables
from app.utils import make_dash_table, create_plot, create_plot_with_runid, create_plot_errorx
import app.utils as utils
import os
import base64
# Full results table and the default slice shown when the page first loads.
df = getalldata()
initvar = 'lightyield'
default_strax = '2.1.1'
default_straxen = '1.2.3'
dftemp = df.loc[ ( df['variable_name']==initvar ) & (df['strax_version']==default_strax) ]
# Variable metadata (legend names, units) keyed by variable_name.
dfvar = getvariables()
dfvartemp = dfvar.loc[ (dfvar['variable_name']==initvar) ]
process_dict = {var:leg for (var, leg) in (zip(dfvar['variable_name'], dfvar['legend_name']) )}
unit_dict = {var:unit for (var, unit) in (zip(dfvar['variable_name'], dfvar['unit']) )}
# Default figures rendered before any callback fires.
# NOTE(review): figname is taken from the full df rather than the filtered
# dftemp — looks inconsistent with the other series; confirm intent.
FIGURE = create_plot(
    x=dftemp["timestamp"].astype('int'),
    xlabel='time',
    y=dftemp["value"],
    ylabel=dfvartemp['legend_name'][0],
    error=dftemp["error"],
    figname=df["figname"]
)
FIGURE_WITH_RUNID = create_plot_with_runid(
    x=dftemp["timestamp"],
    xrunid=dftemp["run_id"],
    xlabel='Time Stamp',
    y=dftemp["value"],
    ylabel=dfvartemp['legend_name'][0],
    yunit=dfvartemp['unit'][0],
    error=dftemp["error"],
    figname=df["figname"]
)
# NOTE(review): df is fetched a second time here — presumably redundant with
# the call above; confirm before removing.
df = getalldata()
def make_version_plot(dftemp):
    """Build a plotly-style figure dict from *dftemp*.

    One scatter ('markers') trace is created per distinct strax version found
    in the frame, plotting value vs run_id with data-type error bars.
    """
    traces = []
    for version in dftemp.strax_version.unique():
        sel = dftemp[dftemp['strax_version'] == version]
        traces.append(dict(
            x=sel['run_id'],
            y=sel['value'],
            error_y=dict(
                type='data',  # value of error bar given in data coordinates
                array=sel['error'],
                visible=True),
            visible=True,
            mode='markers',
            opacity=0.7,
            marker={
                'size': 5,
                'line': {'width': 0.5, 'color': 'white'}
            },
            name=version,
        ))
    figure = {
        'data': traces,
        'layout': dict(
            yaxis={'title': 'hahah'},
            margin={'l': 40, 'b': 40, 't': 10, 'r': 10},
            legend={'x': 0, 'y': 1},
            hovermode='closest'
        )
    }
    return figure
# layout = html.Div([
# html.Div([
# html.Div([
# html.A(
# html.H2("Back to home page"),
# id="home_page",
# href="https://xe1t-offlinemon.lngs.infn.it/dash/",
# ),
# ],style={'width':'30%','float':'left'}),
# html.Div([
# html.A(
# html.H2(children="logout",style={'margin-left':'50%'}),
# id="logout",
# href="https://xe1t-offlinemon.lngs.infn.it/logout/",
# ),
# ],style={'width':'30%','float':'right'}),
# ]),
# # to be fixed: the space organisation isn't very clean.
# # I found this quick fix to put the text where I want but it is not very satisfactory
# html.Br(),
# html.Br(),
# html.Br(),
# html.P(html.H1('Test data')),
# Absolute paths to the logo assets bundled with the dash frontend.
image_filename = '/home/xom/xom/frontend/app/assets/logo_xenon.png'
image_svg = '/home/xom/xom/frontend/app/assets/xenonlogo.svg'
logo = '/home/xom/xom/frontend/app/assets/xenonlogo.png'
def b64_image(image_filename):
    """Return the PNG at *image_filename* as a base64 data-URI string."""
    with open(image_filename, 'rb') as handle:
        raw = handle.read()
    return 'data:image/png;base64,' + base64.b64encode(raw).decode('utf-8')
# SVG logo pre-encoded once at import time (the asset file must exist then).
encoded_image = base64.b64encode(open(image_svg, 'rb').read()).decode()
def b64_imagesvg(image_filename):
    """Return the SVG file at *image_filename* as a base64 data-URI string.

    Fix: the MIME type was ``image/svg``, which browsers do not recognize for
    data URIs; the registered type for SVG is ``image/svg+xml``.
    """
    with open(image_filename, 'rb') as f:
        image = f.read()
    return 'data:image/svg+xml;base64,' + base64.b64encode(image).decode('utf-8')
# Dropdown option lists: one {'label','value'} entry per known variable and
# per available strax version.
variable_option = [{'label':leg, 'value':var} for leg, var in zip(dfvar['legend_name'], dfvar['variable_name'])]
strax_version = utils.getstraxversion()
print('strax_version = ', strax_version)
version_option = [{'label':lab, 'value':val} for lab, val in zip(strax_version, strax_version)]
# Page layout for the version-comparator view: navbar, two dropdowns
# (variable + multi-select strax version) and the comparison graph.
layout = html.Div(className="body",children=[
    html.Div( className='navbar' ,children=[
      html.Div( className='container' , children=[
        html.Div( className='logodiv', children=[
#            html.A("Link to external site",[ html.Img(className='logoim', src=b64_image(logo)), href='https://xe1t-offlinemon.lngs.infn.it/'])
            html.A(html.Img(className='logoim', src=b64_image(logo)), href='https://xe1t-offlinemon.lngs.infn.it/'),
            html.A([html.Span("X"), "enon ",html.Span("O"),"ffline ",html.Span("M"),"onitoring" ], href='https://xe1t-offlinemon.lngs.infn.it/',style={'position':'absolute' ,'margin-left':'1.9em','font-size':22}),
        ]), #logodiv
      ]) #container
    ]), #navbar
    html.P(html.H2('Version Comparator'),style={'text-align': 'center'}),
    html.Div([
        dcc.Dropdown(
            id='process-dropdown',
            options=variable_option,
            value=dfvar['variable_name'][0],
            clearable=False
        ),
        dcc.Dropdown(
            id='version-dropdown',
            options=version_option,
            multi=True,
            clearable=False,
            value=[default_strax]
        )
    ],style={'width': '38%', 'margin': 'auto'}),
    # end dropdown div process
    html.Div([
        dcc.Graph(id='my-graph',
                  hoverData={"points": [{"pointNumber":0}]},
                  figure=make_version_plot(dftemp))
    ],style={'width': '70%', 'display': 'center','margin': 'auto'}),
    # end div (graph+ dropdown)
    # # dropdown div for process
    # html.Div([
    #     dcc.Dropdown(
    #         id='process-dropdownx',
    #         options=variable_option,
    #         value=dfvar['variable_name'][0],
    #         clearable=False
    #     ),
    #     dcc.Dropdown(
    #         id='version-dropdown',
    #         options=[
    #             {'label': 'version 1.0', 'value': 'v1.0'},
    #             {'label': 'version 2.0', 'value': 'v2.0'},
    #             {'label': 'version 3.0', 'value': 'v3.0'},
    #             {'label': 'version 4.0', 'value': 'v4.0'},
    #         ],
    #         multi=True,
    #         clearable=False,
    #         value=['v4.0']
    #     )
    # ],
    # ] ,style={'width': '38%', 'margin': 'auto'})
    # html.Div([
    #     html.Div([
    #         html.P(html.H2('Electron lifetime')),
    #         dcc.Dropdown(
    #             id='version-dropdown',
    #             options=[
    #                 {'label': 'version 1.0', 'value': 'v1.0'},
    #                 {'label': 'version 2.0', 'value': 'v2.0'},
    #                 {'label': 'version 3.0', 'value': 'v3.0'},
    #                 {'label': 'version 4.0', 'value': 'v4.0'},
    #             ],
    #             multi=True,
    #             clearable=False,
    #             value=['v4.0']
    #         ),
    #         html.Div([
    #             dcc.Graph(id='graph-lifetime',figure=make_version_plot(df,'el_lifetime'))
    #         ]),
    #     ], style={'width': '45%','display': 'inline-block','float':'left'}),
    #     html.Div([
    #         html.P(html.H2('Charge yield')),
    #         dcc.Dropdown(
    #             id='version-dropdown2',
    #             options=[
    #                 {'label': 'version 1.0', 'value': 'v1.0'},
    #                 {'label': 'version 2.0', 'value': 'v2.0'},
    #                 {'label': 'version 3.0', 'value': 'v3.0'},
    #                 {'label': 'version 4.0', 'value': 'v4.0'},
    #             ],
    #             multi=True,
    #             clearable=False,
    #             value=['v4.0']
    #         ),
    #         html.Div([
    #             dcc.Graph(id='graph-chargeyield',figure=make_version_plot(df,'charge_yield') )
    #         ]),
    #     ] , style={'width': '45%','display': 'inline-block','float':'right'}),
    # ]),
])
def register_callbacks(dashapp):
    """Register the version-comparator callback on *dashapp*.

    The single callback redraws the comparison graph whenever the variable
    dropdown or the (multi-select) version dropdown changes.
    """
    @dashapp.callback(Output('my-graph', 'figure'), [Input('version-dropdown', 'value'), Input('process-dropdown', 'value')])
    def update_graph(version_value, variable_name):
        print('version_value = ', version_value)
        # Keep only the chosen variable, then the chosen strax version(s).
        by_variable = df.loc[(df['variable_name'] == variable_name) ]
        selected = by_variable[by_variable['strax_version'].isin(version_value)]
        return make_version_plot(selected)
<file_sep>from datetime import datetime as dt
import plotly.graph_objects as go
import pandas_datareader as pdr
from dash.dependencies import Input
from dash.dependencies import Output
from app.utils import getdata
<file_sep>import strax
import straxen
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from argparse import ArgumentParser
import argparse
import sys
import time
import cutax
sys.path +=['../../../utils/']
import xomlib
def press_run(runid):
    """Read the cryostat-pressure SCADA channel for one run and save its mean to XOM.

    Fetches the XE1T.CRY_PT101_PCHAMBER_AI.PI slow-control values for the run,
    averages them, stores the result via xomlib.Xomresult and prints the XOM
    success sentinel.

    :param runid: run number to process
    """
    print(runid)
    sc = straxen.SCADAInterface()
    # Slow-control parameter: cryostat chamber pressure.
    parameters = {'Pcryo':'XE1T.CRY_PT101_PCHAMBER_AI.PI'}
    st = cutax.xenonnt_online(_rucio_local_path='/project/lgrandi/rucio', include_rucio_local = True)
    st.storage += [strax.DataDirectory('/project2/lgrandi/xenonnt/processed', provide_run_metadata=True)]
    # st = straxen.contexts.xenonnt_online()
    sc.context = st
    run_number = str(runid)
    dfbg = sc.get_scada_values(parameters, run_id= run_number, every_nth_value=1)
    data = dfbg.to_numpy()
    mean = np.mean(data)
    xomresult = xomlib.Xomresult(analysis_name="test_scada",
                                 analysis_version = "v0.0",
                                 variable_name='XE1T.CRY_PT101_PCHAMBER_AI.PI',
                                 variable_value=mean,
                                 runid=runid
                                 )
    xomresult.save()
    # NOTE(review): fixed 20 s pause — presumably to let the database write
    # settle before announcing success; confirm whether it is still needed.
    time.sleep(20)
    xomresult.xom_message(success=True)
# if(xom_saver("test_scada",'XE1T.CRY_PT101_PCHAMBER_AI.PI',runid,mean, datatype="main")):
# xom_message(sucess=True)
# else:
# xom_message(sucess=False)
def main():
    """Parse the run id from the command line and process it with press_run.

    Fix: the original created a second, unused ``ArgumentParser()`` after
    parsing — dead code, now removed.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("runid",type=int,help='runid')
    args = parser.parse_args()
    print(args.runid)
    press_run(args.runid)
if __name__ == "__main__":
main()
<file_sep>from datetime import datetime as dt
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input
from dash.dependencies import Output
import dash_bootstrap_components as dbc
from app.utils import getdata, getalldata, getvariables
from app.utils import make_dash_table, create_plot, create_plot_with_runid, create_plot_errorx
import os
import base64
# Full results table and the default slice shown when the quick-view page loads.
df = getalldata()
initvar = 'lightyield'
default_strax = '2.1.1'
default_straxen = '1.2.3'
dftemp = df.loc[ ( df['variable_name']==initvar ) & (df['strax_version']==default_strax) & ( df['straxen_version']==default_straxen) ]
# Variable metadata (legend names, units) keyed by variable_name.
dfvar = getvariables()
dfvartemp = dfvar.loc[ (dfvar['variable_name']==initvar) ]
print('tes ======== ', dfvartemp)
process_dict = {var:leg for (var, leg) in (zip(dfvar['variable_name'], dfvar['legend_name']) )}
unit_dict = {var:unit for (var, unit) in (zip(dfvar['variable_name'], dfvar['unit']) )}
print('process_dict = ' , process_dict)
# Directory holding the per-point figures shown on hover.
image_folder = '/xom/images/'
# Default figures rendered before any callback fires.
FIGURE = create_plot(
    x=dftemp["timestamp"].astype('int'),
    xlabel='time',
    y=dftemp["value"],
    ylabel=dfvartemp['legend_name'][0],
    error=dftemp["error"],
    figname=dftemp["figname"]
)
FIGURE_WITH_RUNID = create_plot(
#FIGURE_WITH_RUNID = create_plot_with_runid(
#    x=dftemp["timestamp"],
#    xrunid=dftemp["run_id"],
    x=dftemp["run_id"],
    xlabel='Time Stamp',
    y=dftemp["value"],
    ylabel=dfvartemp['legend_name'][0],
#    yunit=dfvartemp['unit'][0],
    error=dftemp["error"],
    # NOTE(review): figname uses the full df, not dftemp — confirm intent.
    figname=df["figname"]
)
# Absolute paths to the logo assets bundled with the dash frontend.
image_filename = '/home/xom/xom/frontend/app/assets/logo_xenon.png'
image_svg = '/home/xom/xom/frontend/app/assets/xenonlogo.svg'
logo = '/home/xom/xom/frontend/app/assets/xenonlogo.png'
def b64_image(image_filename):
    """Encode the PNG file at *image_filename* into a data-URI string."""
    with open(image_filename, 'rb') as img:
        encoded = base64.b64encode(img.read()).decode('utf-8')
    return 'data:image/png;base64,' + encoded
def decode_image(image_path):
    """Read *image_path* and return its base64-encoded text (no data-URI prefix)."""
    with open(image_path, 'rb') as handle:
        payload = handle.read()
    return base64.b64encode(payload).decode()
# SVG logo pre-encoded once at import time (the asset file must exist then).
encoded_image = base64.b64encode(open(image_svg, 'rb').read()).decode()
def b64_imagesvg(image_filename):
    """Return the SVG file at *image_filename* as a base64 data-URI string.

    Fix: the MIME type was ``image/svg``, which browsers do not recognize for
    data URIs; the registered type for SVG is ``image/svg+xml``.
    """
    with open(image_filename, 'rb') as f:
        image = f.read()
    return 'data:image/svg+xml;base64,' + base64.b64encode(image).decode('utf-8')
# Dropdown options: one {'label','value'} entry per known variable.
variable_option = [{'label':leg, 'value':var} for leg, var in zip(dfvar['legend_name'], dfvar['variable_name'])]
# Page layout for the quick-view page: navbar, variable dropdown, main graph
# and the hover-image panel updated by the callbacks below.
layout = html.Div(className="body",children=[
    html.Div( className='navbar' ,children=[
      html.Div( className='container' , children=[
        html.Div( className='logodiv', children=[
#            html.A("Link to external site",[ html.Img(className='logoim', src=b64_image(logo)), href='https://xe1t-offlinemon.lngs.infn.it/'])
            html.A(html.Img(className='logoim', src=b64_image(logo)), href='https://xe1t-offlinemon.lngs.infn.it/'),
            html.A([html.Span("X"), "enon ",html.Span("O"),"ffline ",html.Span("M"),"onitoring" ], href='https://xe1t-offlinemon.lngs.infn.it/',style={'position':'absolute' ,'margin-left':'1.9em','font-size':22}),
        ]), #logodiv
      ]) #container
    ]), #navbar
    html.P(html.H2('Quick View'),style={'text-align': 'center'}),
    # dropdown div for process
    html.Div([
        dcc.Dropdown(
            id='process-dropdownx',
            options=variable_option,
            value=dfvar['variable_name'][0],
            clearable=False
        ),
    ],style={'width': '38%', 'margin': 'auto'}),
    # end dropdown div process
    html.Div([
        dcc.Graph(id='my-graph',
                  hoverData={"points": [{"pointNumber":0}]},
                  figure=FIGURE_WITH_RUNID)
    ],style={'width': '70%', 'display': 'center','margin': 'auto'}),
    # end div (graph+ dropdown)
    # div hover plot
    html.Div([
        html.P(html.H2('matching graph'),style={'float':'right'}),
        html.Img(id='embedded_plot',
                 # NOTE(review): ''.format(...) always evaluates to the empty
                 # string, so the decoded logo is discarded and the initial src
                 # is '' — likely a missing 'data:image/png;base64,{}' template;
                 # confirm before changing.
                 src=''.format(decode_image(logo)),style={'width': '100%', 'display': 'inline-block','height':'auto','vertical-align':'middle'})
    ],style={'width': '30%' ,'display': 'inline-block','vertical-align':'top'}),
    # end div hover plot
]) #body
# 'el_lifetime':'Electron Lifetime [us]','charge_yield': 'Charge Yield [p.e./keV]','light_yield':'Light Yield [p.e./keV]'}
def register_callbacks(dashapp):
    """Register the quick-view page callbacks on *dashapp*.

    Two callbacks: one redraws the main graph when the variable dropdown
    changes, the other swaps the side image when the user hovers a point.
    """
    # Bug fix: PreventUpdate is raised below but was never imported at the
    # top of this file, so the hover callback died with a NameError instead
    # of cleanly skipping the update. Imported locally to keep the fix
    # self-contained (dash is already a dependency of this module).
    from dash.exceptions import PreventUpdate
    #callback for the main plot
    @dashapp.callback(Output('my-graph', 'figure'), [Input('process-dropdownx', 'value')])
    def update_graph(variable_name):
        dftemp = df.loc[(df['variable_name'] == variable_name) & (df['strax_version']==default_strax) & ( df['straxen_version']==default_straxen) ]
        dfvartemp = dfvar.loc[ (dfvar['variable_name']== variable_name) ]
        print('dfvartemp = ', dfvartemp)
        # return create_plot_with_runid(
        return create_plot(
            # x=dftemp["timestamp"],
            x=dftemp["run_id"],
            # xrunid=dftemp["run_id"],
            xlabel='Time Stamp',
            y=dftemp["value"],
            ylabel=process_dict[variable_name],
            # yunit = unit_dict[variable_name],
            error=dftemp["error"],
            figname=dftemp["figname"]
        )
    @dashapp.callback(Output("embedded_plot", "src"),[Input("my-graph", "hoverData"), Input('process-dropdownx', 'value')])
    def picture_on_hover(hoverData,variable_name):
        """
        params hoverData: data on graph hover, and dropdowns
        update the graph as the users passes the mouse on a point or the user changes the drop down values.
        """
        print('tata')
        if hoverData is None:
            raise PreventUpdate
        try:
            dftemp = df.loc[(df['variable_name'] == variable_name) & (df['strax_version']==default_strax) & ( df['straxen_version']==default_straxen) ]
            dfvartemp = dfvar.loc[ (dfvar['variable_name']== variable_name) ]
            print('dfvartemp = ', dfvartemp)
            # figtemp = create_plot_with_runid(
            figtemp = create_plot(
                # x=dftemp["timestamp"],
                # xrunid=dftemp["run_id"],
                x=dftemp["run_id"],
                xlabel='Time Stamp',
                y=dftemp["value"],
                ylabel=process_dict[variable_name],
                # yunit = unit_dict[variable_name],
                error=dftemp["error"],
                figname=dftemp["figname"]
            )
            # gets the index the point on which the mouse is on
            point_number = hoverData["points"][0]["pointNumber"]
            # gets the corresponding figure name
            print('figtemp["data"] = ', figtemp["data"])
            figname = str(figtemp["data"][0]["text"].values[point_number]).strip()
            image_path = image_folder +figname
            print('image+path = ', image_path)
            encoded_image = b64_image(image_path)
            # the way I found to print out the figure...
            # return 'data:image/png;base64,{}'.format(encoded_image.decode())
            return encoded_image
        except Exception as error:
            # PreventUpdate keeps the previous image on any failure
            print(error)
            raise PreventUpdate
<file_sep>import argparse
import strax
import straxen
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import sys
st = straxen.contexts.xenonnt()
sys.path +=['../../../utils/']
import xomlib
def main():
    """Compute the total event rate of one run, plus the average rates above
    several ``e_ces`` energy thresholds (keV), and store them in the xom db.

    Expects the run number as a positional CLI argument.  Fixes vs the
    previous version: the undefined names ``rates_1``, ``mean`` and ``runid``
    (all NameErrors at runtime) are removed/replaced, and the threshold rates
    now count selected *events* instead of ``len(value_counts())`` which is
    always the number of bins.
    """
    parser = argparse.ArgumentParser("RunXom")
    parser.add_argument("runs", nargs='+', help="Run number to process")
    parser.add_argument("--container", help=" will fill the xom data base with the container str", default='unfilled')
    args = parser.parse_args()
    container = args.container  # currently unused, kept for CLI compatibility
    run_id = str(args.runs[0]).zfill(6)
    ######################
    ### loading the df ###
    ######################
    df = st.get_df(str(run_id),
                   targets='event_info',
                   progress_bar=True)
    ##########################
    ### doing the analysis ###
    ##########################
    dt = 60  # s: width of the time bins for the binned rates
    t_init = df['time'].iloc[0]
    t_final = df['endtime'].iloc[-1]
    deltat_ns = t_final - t_init
    deltat_s = deltat_ns / 1e9
    nbins = int(deltat_ns / (dt * 1e9))
    rate_total = len(df) / deltat_s
    print('tot rate = ', rate_total)
    ### binned event counts, total and above e_ces thresholds (keV) ###
    rates_all = df['time'].value_counts(bins=nbins, sort=False).values
    rates_10 = df[df['e_ces'] > 10]['time'].value_counts(bins=nbins, sort=False).values
    rates_100 = df[df['e_ces'] > 100]['time'].value_counts(bins=nbins, sort=False).values
    rates_1000 = df[df['e_ces'] > 1000]['time'].value_counts(bins=nbins, sort=False).values
    # binned data, currently not persisted -- kept for future use
    array_data = {'bins': nbins, 'rates_all': rates_all.tolist(), 'rates_10': rates_10.tolist(),
                  'rates_100': rates_100.tolist(), 'rates_1000': rates_1000.tolist()}
    # average rates above thresholds: count the selected events, not the bins
    rate_10 = len(df[df['e_ces'] > 10]) / deltat_s
    rate_100 = len(df[df['e_ces'] > 100]) / deltat_s
    rate_1000 = len(df[df['e_ces'] > 1000]) / deltat_s
    ### xomdb filling ###
    xomresult = xomlib.Xomresult(analysis_name="evt_rate",
                                 analysis_version="v0.0",
                                 variable_name='evt_rate',
                                 variable_value=rate_total,  # was the undefined name `mean`
                                 runid=int(run_id),          # was the undefined name `runid`
                                 data={"rate_10": rate_10, "rate_100": rate_100, "rate_1000": rate_1000}
                                 )
    xomresult.save()
    return 0
if __name__ == "__main__":
main()
<file_sep>import os
# Absolute path of the directory containing this config module.
basedir = os.path.abspath(os.path.dirname(__file__))

class BaseConfig:
    """Application configuration, read from environment variables at import time."""
    # relational + document database connections
    SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL')
    MONGO_URI = os.environ.get('MONGODB_URL')
    SQLALCHEMY_TRACK_MODIFICATIONS = False
    # required: raises KeyError at import time if SECRET_KEY is missing (fail fast)
    SECRET_KEY = os.environ['SECRET_KEY']
    # LDAP authentication settings (all optional, default to None when unset)
    LDAP_USERNAME = os.environ.get('LDAP_USERNAME')
    LDAP_PASSWORD = os.environ.get('LDAP_PASSWORD')
    LDAP_HOST = os.environ.get('LDAP_HOST')
    LDAP_PORT = os.environ.get('LDAP_PORT')
    LDAP_BASE_DN = os.environ.get('LDAP_BASE_DN')
    LDAP_SCHEMA = os.environ.get('LDAP_SCHEMA')
    LDAP_USER_OBJECT_FILTER = os.environ.get('LDAP_USER_OBJECT_FILTER')
    LDAP_PROTOCOL_VERSION = 3
    LDAP_OPENLDAP = os.environ.get('LDAP_OPENLDAP')
<file_sep>import configparser
import shlex
import subprocess
import constant
import locklib as ll
import dblib as dbl
import importlib
import os
def get_xom_config(configname='xomconfig.cfg'):
    """Read the xom configuration file from ``<xomfolder>/utils/`` and return the parser."""
    parser = configparser.ConfigParser()
    parser.read(constant.xomfolder + '/utils/' + configname)
    print(parser.sections())
    return parser
def get_from_config(xomconfig, analysis_name, item_to_return):
    """Fetch *item_to_return* from section *analysis_name* of *xomconfig*.

    Returns a list for the comma-separated options ('exclude_tags',
    'include_tags', 'container', 'available_type'), the raw string for any
    other option, and '' when the section or the option is missing (the
    previous version implicitly returned None for a missing section,
    inconsistent with the missing-option case).
    """
    if analysis_name not in xomconfig.sections():
        print("error in analysis name")  # typo "analyis" fixed
        return ""
    if not xomconfig.has_option(analysis_name, item_to_return):
        return ""
    item_returned = xomconfig.get(analysis_name, item_to_return)
    if item_to_return in ['exclude_tags', 'include_tags', 'container', 'available_type']:
        # these options hold comma-separated lists
        item_returned = item_returned.split(',')
    return item_returned
# def fill_from_config(self, xomconfig):
# analysis_version = xomconfig.get(self.analysis_name,'analysis_version')
# containerlist = xomconfig.get(self.analysis_name,'container')
# self.container_list = containerlist.split(',')
# if xomconfig.has_option(self.analysis_name,'include_tags'):
# include_tags_list = xomconfig.get(self.analysis_name,'include_tags')
# self.include_tags_list = include_tags_list.split(',')
# if xomconfig.has_option(self.analysis_name,'available_type'):
# available_type_list = xomconfig.get(self.analysis_name,'available_type')
# self.available_type_list = available_type_list.split(',')
# variablelist = xomconfig.get(self.analysis_name,'variable_name')
# self.variable_list = variablelist.split(',')
# self.runwise = xomconfig.getboolean(self.analysis_name,'runwise')
# self.folder = xomconfig.get(self.analysis_name,'folder')
# self.command = xomconfig.get(self.analysis_name,'command')
# if xomconfig.has_option(self.analysis_name,'min_run'):
# self.min_run = int(xomconfig.get(self.analysis_name,'min_run'))
# if xomconfig.has_option(self.analysis_name,'max_run'):
# self.max_run = int(xomconfig.get(self.analysis_name,'max_run'))
# def print_config(self):
# print(f"##### Analysis: {self.analysis_name} version {self.analysis_version} ##########")
# print("variable list =", self.variable_list)
# print("container list =", self.container_list)
# print("exclude_tags list =", self.exclude_tags_list)
# print("include_tags list =", self.include_tags_list)
# print("runwise = ", self.runwise)
# print("command =", self.command)
# print("###################################################")
# xomconfig = get_xom_config(constant.configname)
# logger.info('set up config with file %s: ', constant.configname)
# analysis_names = xomconfig.sections()
# ##############################################
# ### filling a list with analysis instances ###
# ##############################################
# analysis_list = []
# for analysis_name in analysis_names:
<file_sep>#!/usr/bin/env python
# coding: utf-8
import strax
import straxen
import math
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
# Interface to the slow-control (SCADA) database.
sc = straxen.SCADAInterface()
# Slow-control parameters to monitor (human-readable name -> SCADA tag).
parameters = {'Cathode': 'XE1T.GEN_HEINZVMON.PI',
              'PMT11': 'XE1T.CTPC.Board06.Chan011.VMon',
              'temp101': 'XE1T.CRY_TE101_TCRYOBOTT_AI.PI',
              'pt101': 'XE1T.CRY_PT101_PCHAMBER_AI.PI',
              'SLM1': 'XE1T.GEN_CE911_SLM1_HMON.PI',
              'FC104': 'XE1T.CRY_FCV104FMON.PI',
              'Anode': 'XE1T.CTPC.Board14.Chan001.VMon'
              }  # Anode parameter added to the original code
# start = 1609682275000000000
# end= 1609736527000000000
st = straxen.contexts.xenonnt()
run_id_int = 44700
run_id = str(run_id_int).zfill(6)  # strax run ids are zero-padded to 6 digits
dfevent = st.get_df(run_id,
                    targets='event_info',
                    progress_bar=True)
# Use the first/last event timestamps (ns) as the slow-control query window.
start = dfevent['time'].iloc[0]
end = dfevent['endtime'].iloc[-1]
df = sc.get_scada_values(parameters, start=start, end=end, every_nth_value=1)
print(df.head())
def rmse(targets, time, units=''):
    """Plot *targets* against *time* and return ``(mean, rms deviation)``.

    The legend label shows ``mean +\- sigma`` followed by *units*.  Fixes a
    SyntaxError in the previous version, where ``units`` was passed as a
    positional argument after the ``label=`` keyword; ``units`` is now part
    of the label string and optional (the script later calls ``rmse`` with
    only two arguments).
    """
    values = np.array(targets)
    mean_value = values.mean()
    # root-mean-square deviation from the mean
    sigma = np.sqrt(((mean_value - values) ** 2).mean())
    label = str(round(mean_value, 3)) + '+\-' + str(round(sigma, 3)) + ' ' + units
    plt.plot(targets, time, 'o', label=label)
    plt.legend()
    plt.show()
    return (mean_value, sigma)
# Units for each monitored parameter (same order as the `parameters` dict).
units = ['V', 'charge', 'C', 'bar', ' ', ' ', 'V']
# event times are loop-invariant: fetch them once
times = dfevent['time'].values
# was: u = -1 then `u=+1` inside the loop, which *assigned* 1 every iteration
# and therefore always picked units[1]; enumerate gives the intended 0,1,2,...
for u, parameter in enumerate(df.head()):
    rmse(df[str(parameter)], times, units[u])
t = [1, 2, 3, 5, 7, 8]
tt = [1, 2, 3, 4, 5, 6]
rmse(t, tt, ' ')  # the original call omitted the required `units` argument
<file_sep>__version__ = '0.7.3'
from . import config
# try loading config, if it doesn't work then set uconfig to None
# this is needed so that strax(en) CI tests will work even without a config file
uconfig = config.Config()
if uconfig.is_configured:
    # a config file exists: honor its logging level
    logger = config.setup_logger(uconfig.logging_level)
else:
    # no config file (e.g. CI): expose uconfig as None and use the default logger
    uconfig = None
    logger = config.setup_logger()
from .rundb import DB, xent_collection, xe1t_collection
from .mongo_files import MongoUploader, MongoDownloader, APIUploader, APIDownloader
<file_sep>version = "0"
# influxdb measurement holding the xom results
measurement_name = "xomdata"
type_of_db = 'influxdb'
# database host address per site
server_address = {'dali': "172.16.58.3", 'lngs': "127.0.0.1"}
database_list = ['influxdb', 'mongodb']
# base command used to run an analysis inside a singularity image
singularity_base = "singularity exec --bind /cvmfs/ --bind /project/ --bind /project2/ --bind /scratch/midway2/gaior --bind /dali /project2/lgrandi/xenonnt/singularity-images/"
configname = 'xomconfig.cfg'
# where the analysis figures are written
figfolder = "/home/xom/data/v1.1/test/."
# seconds between two iterations of the backend polling loops
exec_period = 300
query_period = 100  # days period over which we should search in database (when xom will be running smoothly can beset to 1d)
xomfolder = "/home/gaior/codes/xom/"
output_folder = xomfolder + "/output/"
job_folder = output_folder + "/job_files/"
analysis_code_folder = "/home/gaior/codes/xom/backend/algorithms/"
availability_files_folder = output_folder + "/availability_files/"
# singularity images location and example submission script
container_path = "/project2/lgrandi/xenonnt/singularity-images/"
example_sub = "/home/gaior/codes/xom/utils/job_ex.sh"
# maximum number of simultaneously submitted batch jobs
jobslimit = 10
job_partition = 'broadwl'
<file_sep>import subprocess
import os
import tempfile
import shlex
from utilix import logger
import getpass
# Template of the sbatch submission script; the {placeholders} are filled in
# by submit_job().
sbatch_template = """#!/bin/bash
#SBATCH --job-name={jobname}
#SBATCH --output={log}
#SBATCH --error={log}
#SBATCH --account=pi-lgrandi
#SBATCH --qos={qos}
#SBATCH --partition={partition}
#SBATCH --mem-per-cpu={mem_per_cpu}
#SBATCH --cpus-per-task={cpus_per_task}
{node}
{exclude_nodes}
{hours}
{job}
"""

# Directory containing the singularity images, per partition.
SINGULARITY_DIR = {
    'dali': '/dali/lgrandi/xenonnt/singularity-images',
    'lgrandi': '/project2/lgrandi/xenonnt/singularity-images',
    'xenon1t': '/project2/lgrandi/xenonnt/singularity-images',
    'broadwl': '/project2/lgrandi/xenonnt/singularity-images',
    'kicp': '/project2/lgrandi/xenonnt/singularity-images',
}

# Scratch directory used for temporary job scripts, per partition.
TMPDIR = {
    'dali': os.path.expanduser('/dali/lgrandi/%s/tmp'%(getpass.getuser())),
    'lgrandi': os.path.join(os.environ.get('SCRATCH', '.'), 'tmp'),
    'xenon1t': os.path.join(os.environ.get('SCRATCH', '.'), 'tmp'),
    'broadwl': os.path.join(os.environ.get('SCRATCH', '.'), 'tmp'),
    'kicp': os.path.join(os.environ.get('SCRATCH', '.'), 'tmp'),
}
def overwrite_dali_bind(bind, partition):
    """Check if we are binding non-dali storage when we are on dali compute node. If yes, then overwrite"""
    if partition != 'dali':
        return bind
    # dali compute nodes may only bind dali storage
    dali_only_bind = [
        '/dali',
        '/dali/lgrandi/xenonnt/xenon.config:/project2/lgrandi/xenonnt/xenon.config',
        '/dali/lgrandi/grid_proxy/xenon_service_proxy:/project2/lgrandi/grid_proxy/xenon_service_proxy',
    ]
    print("You are using dali parition, and your bind has been fixed to %s"%(dali_only_bind))
    return dali_only_bind
def wrong_log_dir(path):
    """Check if the directory is NOT in dali"""
    # True unless the path's top-level directory is /dali
    top_level = os.path.abspath(path).split('/')[1]
    return top_level != 'dali'
def overwrite_dali_job_log(path, partition):
    """Relocate the job log into the dali tmp dir when running on the dali
    partition with a log path outside /dali; otherwise return *path* unchanged."""
    if partition == 'dali':
        if not wrong_log_dir(path):
            return path
        logname = os.path.abspath(path).split('/')[-1]
        new_path = TMPDIR['dali'] + '/' + logname
        print('Your log is relocated at: %s'%(new_path))
        return new_path
    print('Your log is located at: %s'%(os.path.abspath(path)))
    return path
def make_executable(path):
    """Make the file at path executable, see """
    current = os.stat(path).st_mode
    # copy each read bit onto the corresponding execute bit
    os.chmod(path, current | ((current & 0o444) >> 2))
def singularity_wrap(jobstring, image, bind, partition):
    """Wraps a jobscript into another executable file that can be passed to singularity exec"""
    # write the job commands into a temporary executable shell script in the
    # partition's scratch directory
    file_descriptor, exec_file = tempfile.mkstemp(suffix='.sh', dir=TMPDIR[partition])
    make_executable(exec_file)
    os.write(file_descriptor, bytes('#!/bin/bash\n' + jobstring, 'utf-8'))
    # one --bind option per requested path
    bind_string = " ".join([f"--bind {b}" for b in bind])
    image = os.path.join(SINGULARITY_DIR[partition], image)
    # the wrapper runs the script inside the container, then deletes it
    new_job_string = f"""singularity exec {bind_string} {image} {exec_file}
rm {exec_file}
"""
    os.close(file_descriptor)
    return new_job_string
def submit_job(jobstring,
               log='job.log',
               partition='xenon1t',
               qos='xenon1t',
               account='pi-lgrandi',
               jobname='somejob',
               sbatch_file=None,
               dry_run=False,
               mem_per_cpu=1000,
               container='xenonnt-development.simg',
               # NOTE(review): mutable default list; safe only as long as the
               # function never mutates `bind` in place (it currently rebinds it)
               bind=['/project2/lgrandi/xenonnt/dali:/dali', '/project2', '/project', '/scratch/midway2/%s'%(getpass.getuser()), '/scratch/midway3/%s'%(getpass.getuser())],
               cpus_per_task=1,
               hours=None,
               node=None,
               exclude_nodes=None,
               **kwargs
               ):
    """
    Submit a job to the dali/midway batch queue
    EXAMPLE
    from utilix import batchq
    import time
    job_log = 'job.log'
    batchq.submit_job('echo "say hi"', log=job_log)
    time.sleep(10) # Allow the job to run
    for line in open(job_log):
        print(line)
    :param jobstring: the command to execute
    :param log: where to store the log file of the job
    :param partition: partition to submit the job to
    :param qos: qos to submit the job to
    :param account: account to submit the job to
    :param jobname: how to name this job
    :param sbatch_file: where to write the job script to
    :param dry_run: only print how the job looks like
    :param mem_per_cpu: mb requested for job
    :param container: name of the container to activate
    :param bind: which paths to add to the container. This is immutable when you specified dali as partition
    :param cpus_per_task: cpus requested for job
    :param hours: max hours of a job
    :param node: define a certain node to submit your job should be submitted to
    :param exclude_nodes: define a list of nodes which should be excluded from submission
    :param kwargs: are ignored
    :return: None
    """
    if 'delete_file' in kwargs:
        logger.warning('"delete_file" option for "submit_job" has been removed, ignoring for now')
    os.makedirs(TMPDIR[partition], exist_ok=True)
    # overwrite bind to make sure dali is isolated
    bind = overwrite_dali_bind(bind, partition)
    # overwrite log directory if it is not on dali and you are running on dali.
    log = overwrite_dali_job_log(log, partition)
    # temporary dirty fix. will remove these 3 from xenon1t soon.
    if partition == 'xenon1t':
        if exclude_nodes is None:
            exclude_nodes = 'dali0[28-30]'
        else:
            exclude_nodes += ',dali028,dali029,dali030'
    if container:
        # need to wrap job into another executable
        jobstring = singularity_wrap(jobstring, container, bind, partition)
        jobstring = 'unset X509_CERT_DIR CUTAX_LOCATION\n' + 'module load singularity\n' + jobstring
    # turn the optional sbatch options into #SBATCH directive lines ('' when unused)
    if not hours is None:
        hours = '#SBATCH --time={:02d}:{:02d}:{:02d}'.format(int(hours), int(hours * 60 % 60), int(hours * 60 % 60 * 60 % 60))
    else:
        hours = ''
    if not node is None:
        if not isinstance(node, str):
            raise ValueError(f'node should be str but given {type(node)}')
        node = '#SBATCH --nodelist={node}'.format(node=node)
    else:
        node = ''
    if not exclude_nodes is None:
        if not isinstance(exclude_nodes, str):
            raise ValueError(f'exclude_nodes should be str but given {type(exclude_nodes)}')
        # string like 'myCluster01,myCluster02,myCluster03' or 'myCluster[01-09]'
        exclude_nodes = '#SBATCH --exclude={exclude_nodes}'.format(exclude_nodes=exclude_nodes)
    else:
        exclude_nodes = ''
    sbatch_script = sbatch_template.format(jobname=jobname, log=log, qos=qos, partition=partition,
                                           account=account, job=jobstring, mem_per_cpu=mem_per_cpu,
                                           cpus_per_task=cpus_per_task, hours=hours, node=node,
                                           exclude_nodes=exclude_nodes)
    if dry_run:
        print("=== DRY RUN ===")
        print(sbatch_script)
        return
    # write the script to a (possibly temporary) file and hand it to sbatch
    if sbatch_file is None:
        remove_file = True
        _, sbatch_file = tempfile.mkstemp(suffix='.sbatch')
    else:
        remove_file = False
    with open(sbatch_file, 'w') as f:
        f.write(sbatch_script)
    command = "sbatch %s" % sbatch_file
    # NOTE(review): sbatch_file is always set at this point, so this branch
    # looks unreachable -- confirm the intent (perhaps `if not remove_file`?)
    if not sbatch_file:
        print("Executing: %s" % command)
    subprocess.Popen(shlex.split(command)).communicate()
    if remove_file:
        os.remove(sbatch_file)
def count_jobs(string=''):
    """Return the number of the current user's queued/running Slurm jobs
    whose ``squeue`` line contains *string* (all jobs when *string* is '').
    """
    # `username` was an undefined name (NameError); resolve it explicitly
    username = getpass.getuser()
    output = subprocess.check_output(shlex.split("squeue -u %s" % username))
    lines = output.decode('utf-8').split('\n')
    return len([job for job in lines if string in job])
<file_sep>
#!/usr/bin/env python
import os
#import numpy as np
import time
import subprocess
import shlex
import sys
sys.path +=['../utils/']
#import locklib as ll
import constant
import utils
import dblib as dbl
import xomlib
import influxdb_client
from influxdb_client.client.write_api import SYNCHRONOUS
from argparse import ArgumentParser
import glob
import time
#import analysis
#a = {'ana name':{"container name": [list], "container 2": [list]} }
def main():
    """For every (analysis, container) pair in the xom config, build the
    corresponding ``test_data.py`` command line and run it inside the right
    singularity container, echoing stdout and stderr.

    Fixes vs the previous version: iterate ``args.items()`` instead of
    ``zip(args.keys(), args.values())``, and capture stderr (it was printed
    but never piped, so it was always None).
    """
    xomconfig = utils.get_xom_config()
    analysis_names = xomconfig.sections()
    for analysis_name in analysis_names:
        containers = utils.get_from_config(xomconfig, analysis_name, 'container')
        exclude_tags = utils.get_from_config(xomconfig, analysis_name, 'exclude_tags')
        include_tags = utils.get_from_config(xomconfig, analysis_name, 'include_tags')
        available_type = utils.get_from_config(xomconfig, analysis_name, 'available_type')
        # map each CLI flag (with surrounding spaces) to its list of values
        args = {}
        if exclude_tags:
            args[' --excluded '] = exclude_tags
        if include_tags:
            args[' --included '] = include_tags
        if available_type:
            args[' --available '] = available_type
        for cont in containers:
            command = "python test_data.py " + ' --container ' + cont + ' --analysis ' + analysis_name
            for flag, values in args.items():
                command += flag
                command += " ".join(values)
            allcommand = constant.singularity_base + cont + " " + command + '\n'
            print(allcommand)
            process = subprocess.run(shlex.split(allcommand),
                                     stdout=subprocess.PIPE,
                                     stderr=subprocess.PIPE,
                                     universal_newlines=True)
            print(process.stdout)
            print(process.stderr)
if __name__ == "__main__":
main()
<file_sep>#!/usr/bin/env python
import os
from argparse import ArgumentParser
import pymongo
from pymongo import MongoClient
from utilix.rundb import pymongo_collection
from utilix.config import Config
from bson.json_util import dumps
import json
from datetime import timezone, datetime, timedelta
import strax
import straxen
import sys
import pprint
import numpy as np
import time
import configparser
import shlex
import subprocess
sys.path +=['../utils/']
import constant
import locklib as ll
import dblib as dbl
import importlib
import analysis as analysis
#analysismodule = importlib.import_module("analysis")
import pymongo
from pymongo import MongoClient
from utilix.rundb import pymongo_collection
from utilix.config import Config
def connect_to_DAQ_DB(collection='runs'):
    """Return the XENON DAQ mongo collection named *collection* (via utilix)."""
    rundb = pymongo_collection(collection)
    return rundb
def main():
    """Erase one of the xom measurements (data/todo/done/submitted) chosen
    with the ``-db`` command-line option.

    The repetitive if-chain was replaced by a dispatch dict, and an unknown
    (or missing) ``-db`` value now prints a message instead of silently
    doing nothing.
    """
    print()
    print("--------------------------------------")
    print("XOM DELETE DB ")
    print("--------------------------------------")
    print()
    parser = ArgumentParser("proc_compare")
    parser.add_argument("-db", type=str, help="what db to erase: todo, submitted,done, data")
    args = parser.parse_args()
    db = args.db
    type_of_db = constant.type_of_db
    # map the CLI keyword to the measurement name to delete
    measurements = {'data': 'xomdata', 'todo': 'xomtodo',
                    'done': 'xomdone', 'submitted': 'xomsubmitted'}
    if db in measurements:
        dbl.Xomdb(type_of_db, measurements[db]).delete()
    else:
        print('unknown db %s: choose among %s' % (db, sorted(measurements)))
main()
<file_sep>import utilix
# NOTE(review): smoke test of a dry-run submission. The attribute path
# `utilix.utilix.batchq` looks odd -- presumably `utilix.batchq.submit_job`
# is intended; TODO confirm the package layout before relying on this.
utilix.utilix.batchq.submit_job(dry_run=True)
# from pymongo import MongoClient
# from utilix.rundb import pymongo_collection
# from utilix.config import Config
# from bson.json_util import dumps
# import json
# import logging
# import sys
# rundb = pymongo_collection('runs')
# sys.path +=['../utils/']
# import xomlib
# import dblib as dbl
# import time
# import constant
<file_sep>#!/usr/bin/env python
import os
from argparse import ArgumentParser
from socket import timeout
import pymongo
from pymongo import MongoClient
from utilix.rundb import pymongo_collection
from utilix.config import Config
from bson.json_util import dumps
import json
from datetime import timezone, datetime, timedelta
import sys
import pprint
import numpy as np
import time
import configparser
import shlex
import subprocess
sys.path +=['../utils/']
import constant
import locklib as ll
import dblib as dbl
import importlib
import analysis as analysis
#analysismodule = importlib.import_module("analysis")
import logging
from logging.handlers import TimedRotatingFileHandler
# Module logger writing to a file rotated at midnight.
logger = logging.getLogger('proc_compare')
log_format = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
log_level = 10  # logging.DEBUG
handler = TimedRotatingFileHandler('../logs/proc_compare.log', when="midnight", interval=1)
logger.setLevel(log_level)
formatter = logging.Formatter(log_format)
handler.setFormatter(formatter)
# add a suffix which you want
handler.suffix = "%Y%m%d"
# finally add handler to logger
logger.addHandler(handler)
import glob
import pymongo
from pymongo import MongoClient
from utilix.rundb import pymongo_collection
from utilix.config import Config
def connect_to_DAQ_DB(collection='runs'):
    """Return the XENON DAQ mongo collection named *collection* (via utilix)."""
    rundb = pymongo_collection(collection)
    return rundb
def get_last_runid(col, name_of_variable, query=None):
    '''written only for the mongo db case '''
    # The previous version named the result `max`, shadowing the builtin,
    # and duplicated the sort/limit chain in both branches.
    cursor = col.find(query) if query else col.find()
    # sort descending and take the first document's field: the maximum value
    return cursor.sort(name_of_variable, -1).limit(1)[0][name_of_variable]
def get_max_mongodb(col, name_of_variable, query=None):
    """Return the largest value of *name_of_variable* in mongo collection
    *col*, optionally restricted to documents matching *query*.

    (Same defects fixed as in ``get_last_runid``: the local variable no
    longer shadows the builtin ``max`` and the duplicated branches are
    merged.  The two functions are duplicates of each other in this file.)
    """
    cursor = col.find(query) if query else col.find()
    return cursor.sort(name_of_variable, -1).limit(1)[0][name_of_variable]
def get_xom_config(configname='xomconfig.cfg'):
    """Load the xom configuration file from ``../utils/`` and return the parser."""
    parser = configparser.ConfigParser()
    parser.read('../utils/' + configname)
    return parser
def process_command(command):
    """Split *command* shell-style, echo the token list and execute it,
    capturing stdout (the result is discarded)."""
    tokens = shlex.split(command)
    print(tokens)
    subprocess.run(tokens,
                   stdout=subprocess.PIPE,
                   universal_newlines=True)
def remove_file(path):
    """Delete *path*: recursively if it is a directory, otherwise as a file."""
    if os.path.isdir(path):
        # local import: shutil was referenced but never imported in this file,
        # so the directory branch raised NameError
        import shutil
        shutil.rmtree(path)
    else:
        os.remove(path)
def empty_directory(path):
    """Remove every entry (file or directory) directly under *path*."""
    for entry in glob.glob(os.path.join(path, '*')):
        remove_file(entry)
def main():
    """Main xom backend loop: compare the DAQ run database with the xom
    databases and, for every configured analysis, queue the runs still to be
    processed.  Polls forever, sleeping ``constant.exec_period`` seconds
    between iterations.
    """
    print()
    print("--------------------------------------")
    print("XOM BACKEND PROCESS COMPARE module ")
    print("--------------------------------------")
    print()
    parser = ArgumentParser("proc_compare")
    parser.add_argument("--verbose", help="Shows informations and statistics about the database", action='store_true')
    parser.add_argument("--clean", help="clean the measurements : data, todo, done, to be used in test phase only", action='store_true')
    parser.add_argument("--loglevel", type=str, help="Shows informations and statistics about the database", default='INFO')
    parser.add_argument("--test", help="writes and reads test database", action='store_true')
    args = parser.parse_args()
    verbose = args.verbose
    clean = args.clean
    loglevel = args.loglevel
    testmode = args.test
    # console logging in addition to the module's rotating file handler
    ch = logging.StreamHandler(sys.stdout)
    ch.setLevel(loglevel.upper())
    # create formatter and add it to the handlers
    formatterch = logging.Formatter('%(name)-20s - %(levelname)-5s - %(message)s')
    ch.setFormatter(formatterch)
    # add the handlers to the logger
    logger.addHandler(ch)
    # text file where all the command to be executed will be written
    # NOTE(review): command_file appears unused in this function -- confirm
    command_file = 'list_of_commands.txt'
    # connect to the Xenon run database
    logger.info('connecting to DAQ dbs')
    rundb = connect_to_DAQ_DB('runs')
    # conncet to xom database
    logger.info('connecting to xom dbs')
    type_of_db = constant.type_of_db
    # in test mode use the test_* measurements so real data is untouched
    if testmode:
        print("---------------------------------------------------------- ")
        print("---------------------------------------------------------- ")
        print("---------------------------------------------------------- ")
        print("---------------------------------------------------------- ")
        print('test mode')
        print("---------------------------------------------------------- ")
        print("---------------------------------------------------------- ")
        print("---------------------------------------------------------- ")
        print("---------------------------------------------------------- ")
        xomdb = dbl.Xomdb(type_of_db, "test_xomdata")
        xomdbtodo = dbl.Xomdb(type_of_db, "test_xomtodo")
        xomdbdone = dbl.Xomdb(type_of_db, "test_xomdone")
        xomdbsubmitted = dbl.Xomdb(type_of_db, "test_xomsubmitted")
    else:
        print('no test mode')
        xomdb = dbl.Xomdb(type_of_db, "xomdata")
        xomdbtodo = dbl.Xomdb(type_of_db, "xomtodo")
        xomdbdone = dbl.Xomdb(type_of_db, "xomdone")
        xomdbsubmitted = dbl.Xomdb(type_of_db, "xomsubmitted")
    if clean:
        # wipe every xom measurement and the accumulated job files
        xomdb.delete()
        xomdbtodo.delete()
        xomdbdone.delete()
        xomdbsubmitted.delete()
        # here erase the job files
        empty_directory(constant.job_folder)
        logger.warning("delete all the measurement related to XOM")
    #############################
    ### sets up the xomconfig ###
    #############################
    xomconfig = get_xom_config(constant.configname)
    logger.info('set up config with file %s: ', constant.configname)
    analysis_names = xomconfig.sections()
    ##############################################
    ### filling a list with analysis instances ###
    ##############################################
    analysis_list = []
    for analysis_name in analysis_names:
        an = analysis.Analysis(analysis_name)
        #getattr(analysismodule, analysis_name)(analysis_name)
        print(an.analysis_name)
        an.fill_from_config(xomconfig)
        analysis_list.append(an)
        # check if analysis exists in xom db
        xomdb.get_last_runid_from_analysis(analysis_name)
    if verbose:
        for an in analysis_list:
            an.print_config()
    stop_condition = 0
    prev_last_run_xom = 0
    prev_last_run_daq = 0
    last_run_xom = int(xomdb.get_last_runid())
    last_run_daq = get_last_runid(rundb, "number")
    print("latest entry in DAQ = ", last_run_daq)
    print("latest entry in XOM DB = ", last_run_xom)
    ##############################
    ### Starting the main loop ###
    ##############################
    while(stop_condition < 1):
        # check for new runs in run DB
        last_run_daq = get_last_runid(rundb, "number")
        if prev_last_run_daq == last_run_daq:
            logger.info('no DAQ new run')
            time.sleep(constant.exec_period)
            continue
        if last_run_daq > prev_last_run_daq:
            prev_last_run_daq = last_run_daq
        # check if xom is up to date
        for an in analysis_list:
            # need to be already presnent in the data base
            last_todo_xom = xomdbtodo.get_last_runid_from_analysis(an.analysis_name)
            last_done_xom = xomdbdone.get_last_runid_from_analysis(an.analysis_name)
            last_submitted_xom = xomdbsubmitted.get_last_runid_from_analysis(an.analysis_name)
            logger.debug(f"last_todo_xom = {last_todo_xom} last_submitted_xom = {last_submitted_xom} last_done_xom = {last_done_xom} and min_run = {an.min_run}")
            # most advanced run id known to xom for this analysis
            last_xom = max([last_todo_xom, last_done_xom, last_submitted_xom, an.min_run])
            if an.max_run:
                max_run = min([an.max_run, last_run_daq])
            else:
                max_run = last_run_daq
            # NOTE(review): compares last_run_daq (not max_run) with last_xom -- confirm
            if last_run_daq == last_xom:
                logger.info("nothing to write in todo dB, analysis %s up to date", an.analysis_name)
                continue
            else:
                #produce list of runs according the analysis:
                logger.info(f"producing the list of new runs from runid {last_xom } to runid {max_run}")
                # list_of_new_runs = list(range(last_todo_xom +1, last_run_daq +1 ,1))
                list_of_command = an.produce_list_of_runs(last_xom, max_run, testmode)
        time.sleep(constant.exec_period)
if __name__ == "__main__":
main()
<file_sep>import strax
import straxen
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from argparse import ArgumentParser
import argparse
import cutax
from cutax.cuts.krypton_selections import KrSingleS1S2
from cutax.cuts.krypton_selections import KrDoubleS1SingleS2
import sys
sys.path +=['../../../utils/']
import xomlib
def press_run(run_id):
    """Compute the 83mKr light yields for *run_id* and save them to the xom db.

    Uses the cutax single-S1 and double-S1 krypton selections; each light
    yield is the mean S1 area (p.e.) divided by the corresponding transition
    energy (keV).
    """
    # st = cutax.contexts.xenonnt_offline(include_rucio_local=False,include_rucio_remote=True )
    st = cutax.xenonnt_online(_rucio_local_path='/project/lgrandi/rucio', include_rucio_local=True)
    st.storage += [strax.DataDirectory('/project2/lgrandi/xenonnt/processed', provide_run_metadata=True)]
    # st = cutax.xenonnt_online(output_folder = "/home/gaior/codes/test/.", _rucio_local_path='/project/lgrandi/rucio', include_rucio_local = True)
    st.register([KrSingleS1S2, KrDoubleS1SingleS2])
    # st.storage += [strax.DataDirectory('/project2/lgrandi/xenonnt/processed', provide_run_metadata=True)]
    # events passing the Kr selections inside the fiducial volume
    data_singleS1 = st.get_df(run_id, targets=("event_info_double", "cut_fiducial_volume", "cut_Kr_SingleS1S2"),
                              selection_str=("cut_fiducial_volume", 'cut_Kr_SingleS1S2'))
    data_doubleS1 = st.get_df(run_id, targets=("event_info_double", "cut_fiducial_volume", "cut_Kr_DoubleS1_SingleS2"),
                              selection_str=("cut_fiducial_volume", 'cut_Kr_DoubleS1_SingleS2'))
    # 83mKr transition energies (keV)
    ES1a = 32.2
    ES1b = 9.4
    # energy of the merged peak
    ES1 = 41.6
    # Light Yield inputs: S1 areas
    S1_a1 = data_doubleS1['s1_a_area'].values
    S1_b1 = data_doubleS1['s1_b_area'].values
    S1_a = data_singleS1['s1_a_area'].values
    # mean
    S1amoy = np.mean(S1_a1)
    S1bmoy = np.mean(S1_b1)
    S1_41 = np.mean(S1_a)
    # standard deviation
    S1asigma = np.std(S1_a1)
    S1bsigma = np.std(S1_b1)
    S1_41sigma = np.std(S1_a)
    # standard error of mean
    errorS1amoy = S1asigma/np.sqrt(len(S1_a1))
    errorS1bmoy = S1bsigma/np.sqrt(len(S1_b1))
    errorS1_41moy = S1_41sigma/np.sqrt(len(S1_a))
    # light yield (p.e./keV)
    LYS1a = S1amoy/ES1a
    LYS1b = S1bmoy/ES1b
    LYS1_41 = S1_41/ES1
    # light yield error
    DLyS1a = errorS1amoy/ES1a
    DLyS1b = errorS1bmoy/ES1b
    DLyS1_41 = errorS1_41moy/ES1
    # free the large intermediate arrays before the db write
    del data_doubleS1
    del data_singleS1
    del S1_a1
    del S1_b1
    del S1_a
    xomresult = xomlib.Xomresult(analysis_name="light_yield",
                                 analysis_version="v0.0",
                                 variable_name='LYS1_41',
                                 variable_value=LYS1_41,
                                 runid=int(run_id),
                                 data={"LYS1a": LYS1a, "DLYS1a": DLyS1a,
                                       "LYS1b": LYS1b, "DLYS1b": DLyS1b,
                                       "LYS1_41": LYS1_41, "DLYS1_41": DLyS1_41})
    xomresult.save()
    xomresult.xom_message(success=True)
    # results = []
    # results.append(run_id)
    # results.append(LYS1a)
    # results.append(DLyS1a)
    # results.append(LYS1b)
    # results.append(DLyS1b)
    # results.append(LYS1_41)
    # results.append(DLyS1_41)
    # np.save('/home/pellegriniq/tab22', results)
def main():
    """CLI entry point: parse the run id (positional arg ``echo``) and process it."""
    parser = argparse.ArgumentParser()
    parser.add_argument("echo")
    cli_args = parser.parse_args()
    print(cli_args.echo)
    print("start")
    press_run(cli_args.echo)
    print('end')
if __name__ == "__main__":
main()
<file_sep>#!/bin/bash
#SBATCH --job-name=xom_job
#SBATCH --ntasks 1
#SBATCH --cpus-per-task 1
#SBATCH --mem-per-cpu=8G
#SBATCH --output=JobName-%j.out
#SBATCH --error=JobName-%j.err
#SBATCH --account=pi-lgrandi
#SBATCH --qos=xenon1t
#SBATCH --partition=xenon1t
CONTAINER_PATH="/project2/lgrandi/xenonnt/singularity-images/xenonnt-development.simg"
SCRIPT="/home/pellegriniq/xom_v1.py 031831"
USER=gaior
RUNDIR="/home/gaior/codes/xom/backend/algorithms/scada/"
echo $INNER_SCRIPT
module load singularity
singularity exec --cleanenv --bind /cvmfs/ --bind /project/ --bind /project2/ --bind /home/$USER --bind /project2/lgrandi/xenonnt/dali:/dali $CONTAINER_PATH python3 $SCRIPT
<file_sep>XOM - Xenon Offline Monitoring
=
* Free software: BSD license
* Documentation: Available on internal XENON wiki
Features
--------
* Analyses XENONnT data and monitors useful quantities through a web application
* The web app is the xenon grafana
Usage
--------
the codes runs
in screen session 1, enter the containre xenon.development.simg
in screen session 2 load modules python and singularity:
- module load python
- module load singularity
choose the analysis to be computed in the /utils/xomconfig.cfg
Then in screen 1 go to /backend/ and execute:
- python proc_compare.py
In screen 2 go to /backend/ and execute:
in an environnement with influxdb-client and numpy isntall with pip
- python proc_runner.py
One can check the latest entry in the data base in /utils/:
- python xomdblib.py --latest
Influxdb structure:
----------------------
measurement: xom version
field: variable
Tag1: analysis name
Tag2: variable name
Tag3: container
field: runid
Tag3: optional analyse dependant tag (example raw / mean)
Todo list:
------------------------
cron tab instead of a while true
job out files handling
in dblib: declare once the query_api etc
handle the import of xomlib
describe well the running procedure
write check functions
The way to delete the records is now by deleting +- 1us of the time of the record. Maybe cleaner way exist
The todo record show the other variable name, which is not foreseen...
Query of many runs in the DB
include detector in the selection of run (in analysis.py) and in the config file (could include non TPC analysis ?)
better check of the number of runs (with --name )
make the test mode more uniform
done list:
-------------
--Job success check--
submitted database to store the entry while it is being run.
If the run doesn't succeed, the entry will stay and not go to the done db
The job is considered successful if the analysis has reached the point where xom prints SUCCESSWITHXOM.
proc_runner checks that this message is in the output file.
-- <file_sep>from setuptools import setup, find_packages
# Get requirements from requirements.txt, stripping the version tags
with open('requirements.txt') as f:
requires = [
r.split('/')[-1] if r.startswith('git+') else r
for r in f.read().splitlines()]
with open('README.md') as file:
readme = file.read()
with open('HISTORY.md') as file:
history = file.read()
setup(
name="utilix",
version="0.7.3",
url='https://github.com/XENONnT/utilix',
description="User-friendly interface to various utilities for XENON users",
long_description_content_type='text/markdown',
packages=find_packages(),
install_requires=requires,
python_requires=">=3.6",
long_description=readme + '\n\n' + history,
)
<file_sep>import strax
import straxen
import cutax
from argparse import ArgumentParser
import sys
sys.path +=['../utils/']
import utils
import constant
def main():
parser = ArgumentParser()
parser.add_argument('--excluded', nargs='+', type=str, default=[])
parser.add_argument('--included', nargs='+', type=str, default=[])
parser.add_argument('--available', nargs='+', type=str, default=[])
parser.add_argument('--analysis', type=str, default='')
parser.add_argument('--container', type=str, default='')
args = parser.parse_args()
exclude_tags = args.excluded
include_tags = args.included
available_type = args.available
analysis_name = args.analysis
container = args.container
#cluster='midway2'
st = cutax.xenonnt_online(_rucio_local_path='/project/lgrandi/rucio', include_rucio_local = True)
st.storage += [strax.DataDirectory('/project2/lgrandi/xenonnt/processed', provide_run_metadata=True)]
# cluster='midway2'
# if cluster=='midway2':
# st = straxen.contexts.xenonnt_online(output_folder="/project2/lgrandi/xenonnt/processed/", _rucio_path=None)
# elif cluster=='dali':
# st = straxen.contexts.xenonnt_online()
# else:
# print('No valid cluster specified. Defaulting to dali options.')
# st = straxen.contexts.xenonnt_online()
if available_type and include_tags and exclude_tags:
print("available_type and include_tags and exclude_tags")
allruns = st.select_runs(available=available_type,
exclude_tags=exclude_tags,
include_tags=include_tags)
elif available_type and exclude_tags and not include_tags:
print("available_type and exclude_tags and not include_tags")
allruns = st.select_runs(available=available_type,
exclude_tags=exclude_tags)
elif available_type and not exclude_tags and include_tags:
print("available_type and not exclude_tags and include_tags")
allruns = st.select_runs(available=available_type,
include_tags=include_tags)
elif not available_type and exclude_tags and include_tags:
print("not available_type and exclude_tags and include_tags")
allruns = st.select_runs(exclude_tags=exclude_tags,
include_tags=include_tags)
elif not available_type and not exclude_tags and include_tags:
print("not available_type and not exclude_tags and include_tags")
allruns = st.select_runs(include_tags=include_tags)
elif not available_type and not include_tags and exclude_tags:
print("not available_type and not include_tags and exclude_tags")
allruns = st.select_runs(exclude_tags=exclude_tags)
elif not exclude_tags and not include_tags and available_type :
print("not exclude_tags and not include_tags and available_type")
allruns = st.select_runs(available=available_type)
else:
print("no condition at all")
# allruns = st.select_runs(exclude_tags=exclude_tags)
allruns = st.select_runs()
name_of_file = constant.availability_files_folder + analysis_name + "_" + container
allruns.number.to_csv(name_of_file)
if __name__ == "__main__":
main()
<file_sep>#!/usr/bin/env python
import os
#import numpy as np
import time
import subprocess
import shlex
import sys
sys.path +=['../utils/']
#import locklib as ll
import utils
import constant
import dblib as dbl
import xomlib
import influxdb_client
from influxdb_client.client.write_api import SYNCHRONOUS
from argparse import ArgumentParser
import glob
import time
from utilix.batchq import submit_job
import logging
from logging.handlers import TimedRotatingFileHandler
logger = logging.getLogger('proc_runner')
log_format = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
log_level = 10
handler = TimedRotatingFileHandler('../logs/proc_runner.log', when="midnight", interval=1)
logger.setLevel(log_level)
formatter = logging.Formatter(log_format)
handler.setFormatter(formatter)
# add a suffix which you want
handler.suffix = "%Y%m%d"
# finally add handler to logger
logger.addHandler(handler)
import pandas as pd
def check_available(analysis_name,container,runid):
filename = constant.availability_files_folder + analysis_name + '_' + container
df = pd.read_csv(filename)
if runid in df.number.values:
return True
else:
return False
stop_condition = True
type_of_db = constant.type_of_db
def main():
print()
print("--------------------------------------")
print("XOM BACKEND PROCESS RUNNER module ")
print("--------------------------------------")
print()
# submitted_jobs = 0
parser = ArgumentParser("proc_runner")
parser.add_argument("analysis_name", type=str, help="", default='event_rate')
parser.add_argument("runid", type=int, help="Logging level", default=52551)
args = parser.parse_args()
analysis_to_test = args.anaysis_name
runid = args.runid
# first check the availability. Should be replaced at some point by the query of the availability directly at the todo stage
xomconfig = utils.get_xom_config()
analysis_names = xomconfig.sections()
analysis_list = []
count +=1
for analysis_name in analysis_names:
if analysis_name == analysis_to_test:
containers = utils.get_from_config(xomconfig,analysis_name, 'container')
exclude_tags = utils.get_from_config(xomconfig,analysis_name, 'exclude_tags')
include_tags = utils.get_from_config(xomconfig,analysis_name, 'include_tags')
available_type = utils.get_from_config(xomconfig,analysis_name, 'available_type')
args = {}
if exclude_tags:
args[' --excluded '] = exclude_tags
if include_tags:
args[' --included '] = include_tags
if available_type:
args[' --available '] = available_type
for cont in containers:
command = "python test_data.py " + ' --container ' + cont + ' --analysis ' + analysis_name
for key, value in zip(args.keys(), args.values()) :
command+= key
command+= " ".join(value)
allcommand = constant.singularity_base + cont + " " + command + '\n'
print (command)
execcommand = shlex.split(allcommand)
process = subprocess.run(execcommand,
stdout=subprocess.PIPE,
universal_newlines=True)
is_available = check_available(analysis_name,p_container,runid)
sbatch_filename = constant.job_folder + "test_" + analysis_name + "_" + str(runid)
if is_available:
code_folder = "cd " + constant.analysis_code_folder + xomconfig.get(analysis_name,'folder') + " \n"
print(code_folder)
command = "python " + xomconfig.get(analysis_name,'command')
print(command)
analysis_command = code_folder + command.replace('[run]',str(runid).zfill(6))
print(analysis_command)
log_filename = constant.job_folder + 'test_' + analysis_name + "_" + str(runid) +'.log'
print(f'data for run {runid} is available, will submit the job {sbatch_filename}')
submit_job(jobstring= analysis_command,
log= log_filename,
partition = constant.job_partition,
qos = constant.job_partition,
jobname = 'xom_job',
sbatch_file = sbatch_filename,
dry_run=False,
mem_per_cpu=1000,
container='xenonnt-development.simg',
cpus_per_task=1,
hours=None,
node=None,
exclude_nodes=None,
)
<file_sep>import strax
import straxen
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from argparse import ArgumentParser
import argparse
import cutax
import utilix
import sys
sys.path +=['../../../utils/']
import xomlib
def press_run(run_id):
# st = cutax.contexts.xenonnt_online(include_rucio_local=False,include_rucio_remote=True )
st = cutax.xenonnt_online(_rucio_local_path='/project/lgrandi/rucio', include_rucio_local = True)
st.storage += [strax.DataDirectory('/project2/lgrandi/xenonnt/processed', provide_run_metadata=True)]
if st.is_stored(run_id, 'event_basics') & st.is_stored(run_id, 'peak_basics'):
if not st.is_stored(run_id, 'event_info'):
try:
# self.st.copy_to_frontend(run_id, 'peak_shadow')
st.copy_to_frontend(run_id, 'peak_basics')
st.copy_to_frontend(run_id, 'event_basics')
self.st.make(run_id, 'event_info')
print('it made event_info')
except:
print('Data not movable')
return
cuts_in_use = []
events = st.get_array(run_id, ['event_info', ] + cuts_in_use,
selection_str=['s2_area>1000',
'z_naive>-5', 'r<70', 'z_naive<-0.5',
's1_area_fraction_top<0.65',
's1_tight_coincidence>3',
] + cuts_in_use,
keep_columns=['time', 'endtime', 's2_center_time', 's2_area'],
progress_bar=False
)
peak_basics = st.get_array(run_id, ['peak_basics'],
selection_str=('type' == 2),
progress_bar=False,
keep_columns=['time', 'endtime', 'area'],
)
if straxen.utilix_is_configured():
c = utilix.xent_collection()
_doc = c.find_one({'number': int(run_id)}, projection={'mode': True, 'start': True, 'tags': True, 'end': True})
start, tag, end = _doc['start'], _doc['tags'], _doc['end']
livetime = (end-start).total_seconds()
#print(livetime/60)
#livetime = (livetime / 1e9)
#print('utilix',livetime)
else:
N = len(peak_basics)
livetime = peak_basics['time'][N-1] - peak_basics['time'][0]
#print('mine', livetime)
min_drift_time = 300
max_drift_time = 2200
containers = np.zeros(len(events), dtype=[('time', np.float64), ('endtime', np.float64)])
containers['time'] = events['s2_center_time'] + min_drift_time * 1e3
containers['endtime'] = events['s2_center_time'] + max_drift_time * 1e3
split_peak_basics = strax.split_by_containment(peak_basics, containers)
result = np.concatenate(split_peak_basics)
print(len(result))
rate = len(result) / (float(livetime))
area = np.sum(result['area']) / np.sum(events['s2_area'])
variables = []
variables.append(rate)
variables.append(area)
print(f'Save sucessfully for run {run_id}',variables)
xomresult = xomlib.Xomresult(analysis_name="photo_ionization",
analysis_version = "v0.0",
variable_name='area',
variable_value=area,
runid=int(run_id),
data= {"area":area, "rate":rate})
xomresult.xom_message(success=True)
xomresult.save()
else:
xomresult.xom_message(success=False)
print('Data not available')
return
def main():
print("start")
parser = argparse.ArgumentParser()
parser.add_argument("echo")
args = parser.parse_args()
print(args.echo)
parser = ArgumentParser()
press_run(args.echo)
print('end')
if __name__ == "__main__":
main()
<file_sep>import dash
from flask import Flask
from flask.helpers import get_root_path
from flask_login import login_required
from config import BaseConfig
def create_app():
server = Flask(__name__)
server.config.from_object(BaseConfig)
register_extensions(server)
register_blueprints(server)
# from app.dashapp1.layout import layout as layout1
# # from app.dashapp1.callbacks import register_callbacks as register_callbacks1
# from app.dashapp1.layout import register_callbacks as register_callbacks1
# register_dashapp(server, 'app1', 'dash/app1', layout1, register_callbacks1)
# from app.dashapp2.layout import layout as layout2
# from app.dashapp2.layout import register_callbacks as register_callbacks2
# # from app.dashapp2.callbacks import register_callbacks as register_callbacks2
# register_dashapp(server, 'app2', 'dash/app2', layout2, register_callbacks2)
from app.dashapp3.layout import layout as layout3
from app.dashapp3.layout import register_callbacks as register_callbacks3
register_dashapp(server, 'app3', 'dash/app3', layout3, register_callbacks3)
# from app.dashapp4.layout import layout as layout4
# from app.dashapp4.layout import register_callbacks as register_callbacks4
# register_dashapp(server, 'app4', 'dash/app4', layout4, register_callbacks4)
from app.dashapp5.layout import layout as layout5
from app.dashapp5.layout import register_callbacks as register_callbacks5
register_dashapp(server, 'app5', 'dash/app5', layout5, register_callbacks5)
from app.dashapp6.layout import layout as layout6
from app.dashapp6.layout import register_callbacks as register_callbacks6
register_dashapp(server, 'app6', 'dash/app6', layout6, register_callbacks6)
return server
def register_dashapp(app, title, base_pathname, layout, register_callbacks_fun):
# Meta tags for viewport responsiveness
meta_viewport = {"name": "viewport", "content": "width=device-width, initial-scale=1, shrink-to-fit=no"}
external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']
my_dashapp = dash.Dash(__name__,
server=app,
url_base_pathname=f'/{base_pathname}/',
assets_folder=get_root_path(__name__) + f'/assets/',
# assets_folder=f'/assets/',
meta_tags=[meta_viewport],external_stylesheets=external_stylesheets)
with app.app_context():
my_dashapp.title = title
my_dashapp.layout = layout
register_callbacks_fun(my_dashapp)
_protect_dashviews(my_dashapp)
def _protect_dashviews(dashapp):
for view_func in dashapp.server.view_functions:
if view_func.startswith(dashapp.config.url_base_pathname):
dashapp.server.view_functions[view_func] = login_required(dashapp.server.view_functions[view_func])
def register_extensions(server):
from app.extensions import mongo
from app.extensions import db
from app.extensions import login
from app.extensions import migrate
from app.extensions import ldap
db.init_app(server)
login.init_app(server)
login.login_view = 'main.login'
migrate.init_app(server, db)
mongo.init_app(server)
ldap.init_app(server)
def register_blueprints(server):
from app.webapp import server_bp
from app.webapp import main_bp
server.register_blueprint(server_bp)
server.register_blueprint(main_bp)
| 924b5b87cfca8490384bcdcca20fef22912a1151 | [
"Markdown",
"Python",
"Shell"
] | 48 | Python | XENONnT/xom | ea4f32380e2ac39a5d8a134075003a7607ea9c85 | 90e87553408dfd6c30d3bc5d4ed72f6b5b6f4669 |
refs/heads/master | <repo_name>nn5980/base64cpp<file_sep>/README.md
# base64cpp
Base64 encode/decode
<file_sep>/b64.h
// Minimal base64 codec (standard "A-Za-z0-9+/" alphabet, '=' padding).
// All members are static; the class is used purely as a namespace.
class Base64 {
	// Encode table: 6-bit value -> base64 character.
	static const char cb64[];
	// Decode table, indexed by (character - '+'); -1 marks characters
	// outside the base64 alphabet.
	static const char cd64[];
	// Encode a final partial block of `len` (1 or 2) raw bytes, '='-padded.
	static void encodeblock_(unsigned char *in, unsigned char *out, long len);
	// Encode one full 3-byte block into 4 base64 characters.
	static void encodeblock(unsigned char *in, unsigned char *out);
	// Decode a final 4-char block; returns the number of '=' pads found (0-2).
	static int decodeblock_(unsigned char *in, unsigned char *out);
	// Decode one full 4-char block into 3 raw bytes.
	static void decodeblock(unsigned char *in, unsigned char *out);
	// True if the character is in the base64 alphabet or is the pad '='.
	static bool b64checkChar(char);
public:
	// True if every one of the `length` characters of `src` is valid base64.
	static bool CheckIsBase64(char *src, long length);
	// Size in characters of the base64 text produced for `rawSize` bytes.
	static long CalcEncodedSize(long rawSize);
	// Upper bound (multiple of 3) on raw bytes decoded from `size` characters.
	static long CalcDecodedSize(long size);
	// Encode `srcLength` bytes of `src` into `dest`; returns encoded length.
	static long Encode(char *dest, char *src, long srcLength);
	// Decode `srcLength` chars of `src` into `dest`; returns decoded length.
	static long Decode(char *dest, char *src, long srcLength);
};
<file_sep>/b64.cpp
#include "b64.h"
// A full base64 block is 4 encoded characters for every 3 raw bytes.
#define ENCODED_BLOCK_SIZE 4
#define RAW_BLOCK_SIZE 3
// Encode table: maps each 6-bit value (0-63) to its base64 character.
const char Base64::cb64[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
// Decode table, indexed by (character - '+') for characters '+' (43)
// through 'z' (122); -1 marks characters that are not in the alphabet.
const char Base64::cd64[] = {
	62,
	-1, -1, -1,
	63, 52,53,54,55,56,57,58,59,60,61,
	-1, -1, -1, -1, -1, -1, -1,
	0, 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,
	-1, -1, -1, -1, -1, -1,
	26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51
};
// Encode a trailing partial block of `len` raw bytes (Encode() only calls
// this with len == 1 or 2) into one 4-character base64 block, filling the
// unused output positions with '=' padding.
void Base64::encodeblock_(unsigned char *in, unsigned char *out, long len)
{
	// Top 6 bits of the first byte always form the first character.
	out[0] = cb64[(int)(in[0] >> 2)];
	if (len == 1)
	{
		// One raw byte: its low 2 bits (zero-extended) make the second
		// character; the last two positions are padding.
		out[1] = cb64[(int)(((in[0] & 0x03) << 4))];
		out[2] = '=';
		out[3] = '=';
		return;
	}
	out[1] = (cb64[(int)(((in[0] & 0x03) << 4) | ((in[1] & 0xf0) >> 4))]);
	if (len == 2)
	{
		// Two raw bytes: low 4 bits of the second byte make the third
		// character; only the last position is padding.
		out[2] = cb64[(int)((in[1] & 0x0f) << 2)];
		out[3] = '=';
		return;
	}
	// Only reachable for len >= 3, which Encode() never passes here; kept
	// as a defensive fall-through matching encodeblock().
	out[2] = (len > 1 ? cb64[(int)(((in[1] & 0x0f) << 2) | ((in[2] & 0xc0) >> 6))] : '=');
	out[3] = (len > 2 ? cb64[(int)(in[2] & 0x3f)] : '=');
}
// Encode one full 3-byte block into 4 base64 characters.
void Base64::encodeblock(unsigned char *in, unsigned char *out)
{
	// Pack the three bytes into one 24-bit group, then emit the four
	// 6-bit slices through the encode table.
	unsigned long group = ((unsigned long)in[0] << 16) |
	                      ((unsigned long)in[1] << 8) |
	                      (unsigned long)in[2];
	out[0] = cb64[(group >> 18) & 0x3f];
	out[1] = cb64[(group >> 12) & 0x3f];
	out[2] = cb64[(group >> 6) & 0x3f];
	out[3] = cb64[group & 0x3f];
}
// Decode the final 4-character block of a base64 string, honouring '='
// padding. Writes 1-3 raw bytes to `out` and returns how many bytes
// FEWER than RAW_BLOCK_SIZE were produced (0, 1 or 2).
int Base64::decodeblock_(unsigned char *in, unsigned char *out)
{
	// Translate each character back to its 6-bit value via cd64.
	unsigned char in_[ENCODED_BLOCK_SIZE];
	in_[0] = cd64[in[0] - '+'];
	in_[1] = cd64[in[1] - '+'];
	in_[2] = cd64[in[2] - '+'];
	in_[3] = cd64[in[3] - '+'];
	out[0] = (((in_[0] << 2) & 0xfc) | in_[1] >> 4);
	if (in[2] == '=')
		// "xx==": only one raw byte was encoded.
		return 2;
	else
		out[1] = (((in_[1] << 4) & 0xf0) | in_[2] >> 2);
	if (in[3] == '=')
		// "xxx=": two raw bytes were encoded.
		return 1;
	else
		out[2] = (((in_[2] << 6) & 0xc0) | in_[3]);
	return 0;
}
// Decode one full 4-character base64 block into 3 raw bytes.
void Base64::decodeblock(unsigned char *in, unsigned char *out)
{
	// Translate each character back to its 6-bit value, pack the four
	// values into one 24-bit group, then split the group into 3 bytes.
	unsigned long group =
		((unsigned long)(unsigned char)cd64[in[0] - '+'] << 18) |
		((unsigned long)(unsigned char)cd64[in[1] - '+'] << 12) |
		((unsigned long)(unsigned char)cd64[in[2] - '+'] << 6) |
		(unsigned long)(unsigned char)cd64[in[3] - '+'];
	out[0] = (unsigned char)((group >> 16) & 0xff);
	out[1] = (unsigned char)((group >> 8) & 0xff);
	out[2] = (unsigned char)(group & 0xff);
}
// Report whether `c` may appear in base64 text: either the padding
// character '=' or a character the decode table recognises.
bool Base64::b64checkChar(char c)
{
	if (c == '=')
		return true;
	// cd64 covers the characters '+' .. '+'+79; entries of -1 inside that
	// span mark characters that are not part of the alphabet.
	return (c >= '+') && (c < ('+' + 80)) && (cd64[c - '+'] >= 0);
}
// Scan `src` and report whether all `length` characters are valid base64.
bool Base64::CheckIsBase64(char *src, long length) {
	for (const char *cursor = src, *stop = src + length; cursor < stop; ++cursor) {
		if (!b64checkChar(*cursor))
			return false;
	}
	return true;
}
// Number of base64 characters produced for `rawSize` input bytes: the
// input is padded up to a whole number of 3-byte blocks, and each block
// yields 4 output characters.
long Base64::CalcEncodedSize(long rawSize) {
	long remainder = rawSize % RAW_BLOCK_SIZE;
	long paddedSize = remainder ? rawSize + (RAW_BLOCK_SIZE - remainder) : rawSize;
	return paddedSize / RAW_BLOCK_SIZE * ENCODED_BLOCK_SIZE;
}
// Maximum number of raw bytes that `size` base64 characters can decode
// into (a multiple of 3; '=' padding may reduce the real count by 1 or 2).
long Base64::CalcDecodedSize(long size)
{
	long fullBlocks = size / ENCODED_BLOCK_SIZE;
	return fullBlocks * RAW_BLOCK_SIZE;
}
long Base64::Encode(char *dest, char *src, long sizeSrc)
{
long sizeEncoded = CalcEncodedSize(sizeSrc);
int rest = sizeSrc % RAW_BLOCK_SIZE;
long absSize = sizeSrc - rest;
for (long i = 0, j = 0; i < absSize; i += RAW_BLOCK_SIZE, j += ENCODED_BLOCK_SIZE) {
encodeblock((unsigned char*)&src[i], (unsigned char*)&dest[j]);
}
if (rest)
encodeblock_((unsigned char*)&src[absSize], (unsigned char*)&dest[sizeEncoded-ENCODED_BLOCK_SIZE],rest);
return sizeEncoded;
}
// Decode `srcLength` base64 characters from `src` into `dest` and return
// the number of raw bytes written. `srcLength` is expected to be a
// multiple of ENCODED_BLOCK_SIZE; '=' padding in the final block shrinks
// the result by one or two bytes.
long Base64::Decode(char *dest, char *src, long srcLength)
{
	// Bug fix: with srcLength < ENCODED_BLOCK_SIZE (including 0), `length`
	// below went negative and the final decodeblock_ call read before
	// `src` and wrote before `dest`. Nothing can be decoded in that case.
	if (srcLength < ENCODED_BLOCK_SIZE)
		return 0;
	long destLength = CalcDecodedSize(srcLength);
	long length = srcLength - ENCODED_BLOCK_SIZE;
	unsigned char *up_dest = (unsigned char*)dest;
	unsigned char *up_src = (unsigned char*)src;
	// Decode every block except the last one...
	for (long i = 0, j = 0; i < length; i += ENCODED_BLOCK_SIZE, j += RAW_BLOCK_SIZE) {
		decodeblock(&up_src[i], &up_dest[j]);
	}
	// ...then decode the final block, subtracting the padding count that
	// decodeblock_ reports to obtain the true decoded size.
	return destLength - decodeblock_(&up_src[length], &up_dest[destLength - RAW_BLOCK_SIZE]); //result decoded size
} | f8f21d7cfda64ca01d52c4a714d67214003cb49e | [
"Markdown",
"C++"
] | 3 | Markdown | nn5980/base64cpp | 39f179d79f20642038575a60db798dee279229e1 | e0a5219680332adf72356a610304eba37d837b3c |
refs/heads/master | <repo_name>Sorangon/NEOC-1_JAM<file_sep>/Source/NEOC_Jam_1/Private/LevelData.cpp
// Fill out your copyright notice in the Description page of Project Settings.
#include "LevelData.h"
ULevelData::ULevelData()
{
RoundDatas = TArray<FRoundData>();
FRoundData DefaultRound = FRoundData();
DefaultRound.Waves = TArray<FWaveData>();
FWaveData DefaultWave = FWaveData();
DefaultRound.Waves.Add(DefaultWave);
RoundDatas.Add(DefaultRound);
}
<file_sep>/Source/NEOC_Jam_1/Public/LevelData.h
#pragma once
#include "CoreMinimal.h"
#include "Engine/DataAsset.h"
#include "Enemy.h"
#include "LevelData.generated.h"
USTRUCT(BlueprintType)
///<summary>
///Pairs the enemy class to spawn with the index of the spawner that
///should produce it.
///</summary>
struct FEnemyData {
	GENERATED_BODY()
	// Blueprint class of the enemy actor to spawn.
	UPROPERTY(EditAnywhere, BlueprintReadWrite)
	TSubclassOf<AEnemy> EnemyActorType;
	// Index of the spawner this enemy is routed to.
	UPROPERTY(EditAnywhere, BlueprintReadWrite)
	int32 TargetSpawner = 0;
};
USTRUCT(BlueprintType)
///<summary>
///Describes one wave: the enemies it spawns and how many of them may
///still remain before the wave counts as done.
///</summary>
struct FWaveData {
	GENERATED_BODY()
	// Enemies spawned during this wave, with their target spawners.
	UPROPERTY(EditAnywhere, BlueprintReadWrite)
	TArray<FEnemyData> Enemies;
	// NOTE(review): presumably the wave completes once the remaining enemy
	// count drops to this value — confirm in the wave-management logic.
	UPROPERTY(EditAnywhere, BlueprintReadWrite)
	int32 AllowedRemainingEnemies = 0;
};
USTRUCT(BlueprintType)
///<summary>
///Describes one round as an ordered list of enemy waves.
///</summary>
struct FRoundData {
	GENERATED_BODY()
	// Waves played in order during this round.
	UPROPERTY(EditAnywhere, BlueprintReadWrite)
	TArray<FWaveData> Waves;
};
/**
 * Data asset describing a whole level: an ordered list of rounds, each
 * made of successive enemy waves.
 */
UCLASS(BlueprintType)
class NEOC_JAM_1_API ULevelData : public UDataAsset {
	GENERATED_BODY()
	// Seeds RoundDatas with one default round (see LevelData.cpp).
	ULevelData();
public:
	// Rounds played in order; each round contains its waves.
	UPROPERTY(EditAnywhere, BlueprintReadonly)
	TArray<FRoundData> RoundDatas;
};
<file_sep>/README.md
# NEOC-1_JAM
The project repository of my game for the Neon City Jam
| b0ad9260706721422ad8d507182106564187f3cd | [
"Markdown",
"C++"
] | 3 | C++ | Sorangon/NEOC-1_JAM | 4dcef3042d551913fb8c7419cb8bc1a80dbf9381 | 507e206d789e98566e29313624aab7e4b505a173 |
refs/heads/master | <repo_name>Santoshgautam/RServices_RoR_BS<file_sep>/app/helpers/service_requests_helper.rb
# View helpers for service-request pages: populating form dropdowns,
# rendering per-request action buttons, rating cells and address actions.
module ServiceRequestsHelper
  # City id of the request's address, used to preselect the city dropdown.
  def city_selection_from_address service_request
    service_request.address.city.id
  end

  # Sibling sub-services of the given service (services sharing its parent).
  def get_sub_services_list service_id
    Service.where(parent_id: get_parent_id(service_id))
  end

  # The request's top-level service (parent of its sub-service) as offered
  # in the city of the request's address.
  def get_services_list service_request
    city = service_request.address.city
    city.services.where(id: get_parent_id(service_request.service_id) )
  end

  # Time slots the provider portfolio can still take when editing this request.
  def get_available_time_slots service_request
    service_request.portfolio.available_time_slots_for_update(service_request)
  end

  # Id of the parent service of the given service.
  def get_parent_id service_id
    Service.find(service_id).parent_id
  end

  # Action buttons for a request row, chosen by status: finished/cancelled
  # requests get only "show"; pending/incomplete ones also get "edit" and a
  # confirmed "cancel"; any other status falls back to "show".
  def user_actions request
    if ["Completed", "Cancelled", "Rejected"].include?(request.status.try(:name))
      (link_to t('.show', :default => t("helpers.links.show")),service_request_path(request),:class => 'btn btn-xs btn-primary')
    elsif ["Pending", "InCompleted"].include?(request.status.try(:name))
      (link_to t('.show', :default => t("helpers.links.show")),service_request_path(request),:class => 'btn btn-xs btn-primary btn_margin') +
      (link_to t('.edit', :default => t("helpers.links.edit")), edit_service_request_path(request), :class => 'btn btn-primary btn-xs btn_margin') +
      (link_to t('.cancel', :default => t("helpers.links.cancel")),cancel_service_request_service_request_path(request),:method => :patch,:data => { :confirm => t('.confirm', :default => t("helpers.links.confirm", :default => 'Are you sure?')) },:class => 'btn btn-xs btn-danger')
    else
      (link_to t('.show', :default => t("helpers.links.show")),service_request_path(request),:class => 'btn btn-xs btn-primary btn_margin')
    end
  end

  # Rating cell for a request row. Completed requests show the existing
  # rating (add_rating_html_block is defined elsewhere) or, for customers
  # without one yet, a "Give Rating" link; other statuses show a placeholder.
  def user_ratings request
    if request.status.try(:name) == "Completed"
      if current_user.user?
        request.feedback.present? ? add_rating_html_block(request) : (link_to 'Give Rating',new_feedback_path(portfolio_id: request.portfolio_id, request_id: request.id),:method => :get,:class => 'btn btn-success btn-xs')
      else
        request.feedback.present? ? add_rating_html_block(request) : '-'
      end
    else
      "Not Yet Rated"
    end
  end

  # <thead> markup for the service-request listing table.
  def service_request_header_html_block
    columns = ['Service Name', 'Address','User','Service Date','Time','Service Request Number','Status','Rating','Actions']
    thead = content_tag :thead do
      content_tag :tr do
        columns.collect {|column| concat content_tag(:th,column)}.join().html_safe
      end
    end
  end

  # Edit and delete icon buttons for an address row (delete asks to confirm).
  def address_action_links address
    (link_to edit_address_path(address),class: 'btn btn-default btn-xs' do
      content_tag(:span, '',class: 'glyphicon glyphicon-edit')
    end) +
    (link_to address_path(address),method: "delete",class: 'btn btn-default btn-xs',:data => { :confirm => t('.confirm', :default => t("helpers.links.confirm", :default => 'Are you sure?')) } do
      content_tag(:span, '',class: 'glyphicon glyphicon-remove')
    end)
  end

  # Email of the assigned employee, or "-" when unassigned.
  def assigned_user(user_id=nil)
    user_id.present? ? User.find(user_id).try(:email) : "-"
  end
end
<file_sep>/db/migrate/20180828094653_cities_services.rb
# Join table for the cities <-> services many-to-many association;
# no primary key is needed on a pure join table.
class CitiesServices < ActiveRecord::Migration[5.2]
  def change
    create_table :cities_services, id: false do |t|
      # t.references is the documented alias of t.belongs_to.
      t.references :city, index: true
      t.references :service, index: true
    end
  end
end
<file_sep>/app/helpers/partner/company_services_helper.rb
# View helpers for the partner "company services" forms.
module Partner::CompanyServicesHelper
  # Sub-service <select> for the given form; choosing an option fires a
  # remote GET to get_cities_path so the city list can be refreshed.
  def select_available_services sub_services, form
    form.select(:service_id, options_from_collection_for_select(sub_services, :id, :name), { :prompt => "Select Sub Service"}, required: 'true', class: 'form-control form_input', data: {remote: true, url: get_cities_path, method: 'get'})
  end

  # Label plus multi-select of cities for a portfolio service.
  def select_service_city cities
    (label_tag 'cities', 'Cities', class: "control-label col-sm-2") +
    content_tag(:div,class: "col-sm-10") do
      select_tag 'portfolio_service[city_ids]', options_from_collection_for_select(cities, :id, :name), multiple: true, class: 'form-control form_input'
    end
  end

  # need to refactor
  # def get_city_name(service_id)
  #   current_user.portfolio.portfolio_services.where(service_id: service_id).map{|m| m.city.name}.join(", ")
  # end

  # Price to display for a service; falls back to "0.00" when unset.
  def company_service_price service
    service.price.present? ? service.price : "0.00"
  end
end
<file_sep>/app/controllers/admin/dashboard_controller.rb
# Admin landing page.
class Admin::DashboardController < AdminController
  # No Dashboard model exists, so CanCan authorizes against the
  # symbolic :dashboard subject.
  authorize_resource :class => :dashboard

  # Renders the dashboard view; no data is loaded here.
  def index
  end
end
<file_sep>/app/controllers/addresses_controller.rb
# CRUD for the signed-in user's saved addresses.
class AddressesController < ApplicationController
  before_action :authenticate_user!
  before_action :find_address, only: [:edit, :update, :destroy]

  def new
    # NOTE(review): when :address params are present this assigns a geocoder
    # result set (Address.near) rather than a new record — presumably used
    # by an address-search view; confirm before changing.
    @address = params[:address].present? ? Address.near(params[:search], 50, :order => :distance) : Address.new
  end

  def index
    @addresses = current_user.addresses
  end

  def edit
    @cities = City.details
  end

  def update
    # Bug fix: the return value of update_attributes was discarded and the
    # record object itself tested (always truthy), so validation failures
    # were reported as successes.
    if @address.update_attributes(address_params)
      flash[:success] = "Updated address Successfully!"
    else
      flash[:error] = "Error"
    end
    redirect_to addresses_path
  end

  def create
    # Bug fix: the old code called .create (which already saves) and then
    # #save a second time; build once and save once instead.
    @address = current_user.addresses.new(address_params)
    if @address.save
      # Bug fix: the success message was stored under flash.now[:error];
      # use a persistent :success entry so it survives the redirect.
      flash[:success] = "Address created successfully!"
      redirect_to '/dashboard'
    else
      flash.now[:error] = "Address could not save!"
      @cities = City.details
      render :new
    end
  end

  # Responds with states for a chosen country or cities for a chosen state
  # (backs the cascading location dropdowns).
  def get_states_and_cities
    if params[:country_selection].present?
      @states = State.details(params[:country_selection])
    elsif params[:state_selection].present?
      @cities = City.details(params[:state_selection])
    end
  end

  def destroy
    # Addresses referenced by past service requests must be kept.
    if @address.has_service_requests?
      flash[:error] = "Address which availed services can't be deleted"
    else
      @address.destroy
      flash[:success] = "Address deleted successfully!"
    end
    redirect_to addresses_path
  end

  private

  def find_address
    @address = Address.find(params[:id])
  end

  # Strong parameters for address forms.
  # Look better possibilities to merge extra params which is not included in form
  def address_params
    params.require(:address).permit(:flat_number, :street_name, :landmark, :user_id, :type, :pin_code, :city_id, :address, :google_address)
  end
end
<file_sep>/app/controllers/feedbacks_controller.rb
# Handles creation of service feedback (ratings) by signed-in users.
class FeedbacksController < ApplicationController
  before_action :authenticate_user!

  def new
    @feedback = Feedback.new
  end

  # Builds the feedback for the current user and reports the outcome via
  # flash; failures bounce back to the submitting page.
  def create
    @feedback = current_user.feedbacks.new(feedback_params)
    if @feedback.save
      flash[:success] = "Successfully Rated!"
      redirect_to service_requests_path
    else
      flash[:error] = @feedback.errors.full_messages.join(",")
      redirect_back fallback_location: request.referrer
    end
  end

  private

  # Strong parameters for the feedback form.
  def feedback_params
    params.require(:feedback).permit(:description, :rating, :user_id, :portfolio_id, :service_request_id)
  end
end
<file_sep>/app/controllers/partner/users_controller.rb
# Lets a partner edit a user's profile details.
class Partner::UsersController < PartnerController
  before_action :find_user, only: [:edit, :update]

  def edit
  end

  def update
    if @user.update_attributes(user_params)
      flash[:success] = "Successfully Updated!"
    else
      flash[:error] = @user.errors.full_messages.join(",")
    end
    redirect_back fallback_location: request.referrer
  end

  private

  # Strong parameters for the profile form.
  # Bug fix: :phone was permitted twice; the duplicate is removed.
  def user_params
    params.require(:user).permit(:first_name, :last_name, :phone)
  end

  def find_user
    @user = User.find(params[:id])
  end
end
<file_sep>/app/assets/javascripts/feedbacks.js
// Initialise the jQuery Raty star-rating widget on the feedback form.
// Clicking a star mirrors the chosen score into the hidden
// #feedback_rating input so it is submitted with the form.
$(function(){
  $('#default').raty(
    {click: function(score, evt)
      {
        $('#feedback_rating').val(score);
      }
    });
});<file_sep>/db/migrate/20180904055750_add_time_slot_id_to_service_requests.rb
# Adds scheduling columns to service_requests: the booked time slot and
# the date the service is to be performed.
class AddTimeSlotIdToServiceRequests < ActiveRecord::Migration[5.2]
  def change
    # Plain integer reference; no index or foreign key is added here.
    add_column :service_requests, :time_slot_id, :integer
    add_column :service_requests, :service_date, :datetime
  end
end
<file_sep>/app/models/service_request.rb
# A customer's booking of a service at one of their addresses for a given
# time slot, optionally fulfilled by a provider portfolio and an assigned
# employee.
class ServiceRequest < ApplicationRecord
  #active record Associations
  has_one :feedback
  belongs_to :user
  belongs_to :address
  belongs_to :service
  belongs_to :status
  belongs_to :portfolio
  belongs_to :time_slot
  # Employee the request is assigned to (may be unassigned).
  belongs_to :assigned_to, class_name: 'User',foreign_key: "assignee_id", optional: true

  validates :user_id, :address_id, :service_id, :time_slot_id, presence: true

  #scope method
  # Most recently touched requests first.
  scope :ordered, -> {order('updated_at DESC')}
  scope :accepted_request, -> { where(status_id: Status.accepted.first.id) }
  # NOTE(review): despite the name, this returns the assignees of *pending*
  # requests, and maps the relation to a plain Array of users rather than
  # returning a scope — confirm whether that is intended.
  scope :available_employees, -> { where('status_id IN (?)', [Status.pending.first.id]).map{|m| m.assigned_to}.compact
  }

  #delegates to access address columns from service_request object
  delegate :flat_number,:street_name,:pin_code,:city, :to => :address

  #callback: default the status and generate a unique reference.
  before_validation :set_request_status, :generate_service_request_number

  # Defaults a new request to the "Pending" status.
  def set_request_status
    self.status_id = Status.pending.first.id if status_id.blank?
  end

  # Assigns a random, stable "SR-..." reference the first time only.
  def generate_service_request_number
    self.service_request_number = "SR-#{SecureRandom.hex(10)}" unless self.service_request_number.present?
  end

  # def google_address?
  #   unless address.google_address.blank?
  #     true
  #   else
  #     false
  #   end
  # end

  # def google_address
  #   if google_address?
  #     address.google_address
  #   end
  # end

  # Canned comments offered when closing a request.
  def self.comments_list
    ['Task completed successfully.','Task is still in pending.', 'No one available at given address & timing.', 'Work depends on others', 'Other Reason']
  end

  # Customer phone, or "-" when missing.
  def user_phone
    user.phone.present? ? user.phone : "-"
  end

  def user_address
    address.complete_address
  end

  def user_name
    user.full_name
  end

  def service_name
    service.try(:name).titleize
  end

  def service_status
    status.try(:name)
  end

  # Human-readable slot range via TimeSlot#start_time_with_end_time; nil-safe.
  def service_time
    try(:time_slot).try(:start_time_with_end_time)
  end

  # Requests that already have a provider, newest first, paginated 5/page.
  def self.get_all_service_requests page
    where.not(portfolio_id: [nil, ""]).order("id DESC").paginate(:page => page, :per_page => 5)
  end

  # Email of the assigned employee, or a placeholder when unassigned.
  def assignee_details
    assigned_to.present? ? assigned_to.email : "No Assignee Yet"
  end
end
<file_sep>/app/controllers/application_controller.rb
class ApplicationController < ActionController::Base
  protect_from_forgery

  # On CanCan authorization failure, HTML requests are redirected to the
  # root page with the error message; JSON/JS requests get 403 Forbidden.
  rescue_from CanCan::AccessDenied do |exception|
    respond_to do |format|
      format.json { head :forbidden, content_type: 'text/html' }
      format.html { redirect_to main_app.root_url, notice: exception.message }
      format.js { head :forbidden, content_type: 'text/html' }
    end
  end

  # Overrides the authenticate_user! helper (presumably Devise's, which is
  # assumed to provide current_user — confirm against the Gemfile):
  # unauthenticated visitors are sent to the root path with a flash error
  # instead of the sign-in page.
  def authenticate_user!(*args)
    unless current_user
      flash[:error] = "You are not authorized to view that page."
      redirect_to root_path
    end
  end
end
<file_sep>/app/controllers/admin/services_controller.rb
# Admin CRUD for Service records and their nested sub-services.
class Admin::ServicesController < AdminController
  load_and_authorize_resource
  before_action :set_service, only: [:show, :edit, :update, :destroy, :edit_sub_services]
  layout 'admin'

  # GET /admin/services — paginated list of top-level services.
  def index
    @services = Service.get_all_services(params[:page])
  end

  def show
  end

  def new
    @service = Service.new
    @cities = City.details.order(name: :asc)
  end

  def edit
    @cities = City.details.order(name: :asc)
  end

  # POST /admin/services
  def create
    @cities = City.details.order(name: :asc)
    # Bug fix: `Service.create` always returns an object (truthy even when
    # validation fails), so the old `if @service` could never reach the
    # error branch. Build and save explicitly instead.
    @service = Service.new(service_params)
    if @service.save
      flash[:success] = "Service created successfully "
      redirect_to admin_services_path
    else
      flash[:error] = "Service request not created!"
      redirect_to new_admin_service_path
    end
  end

  # PATCH /admin/services/:id
  def update
    if @service.update(service_params)
      flash[:success] = "Service updated successfully "
      redirect_to admin_services_path
    else
      flash[:error] = "Service not updated!"
      redirect_to new_admin_service_path
    end
  end

  def destroy
    @service.destroy
    flash[:success] = "Service was successfully destroy !"
    redirect_to admin_services_url
  end

  # GET — paginated sub-services of a parent service.
  def sub_services
    @services = Service.get_all_sub_services(params[:id], params[:page])
  end

  def create_sub_services
    @sub_service = Service.new
  end

  def edit_sub_services
  end

  # PATCH — update a sub-service, then return to its parent's listing.
  def update_sub_services
    if @service.update(sub_service_params)
      flash[:success] = "Service is successfully updated"
    else
      # Bug fix: the failure branch previously reused the success message.
      flash[:error] = "Service could not be updated!"
    end
    redirect_to sub_services_admin_service_path(@service.service)
  end

  private

  def set_service
    @service = Service.find(params[:id])
  end

  # Strong parameters for a top-level service.
  def service_params
    params.require(:service).permit(:name,:parent_id, :price, :photo, :city_ids=> [])
  end

  # Strong parameters for a sub-service.
  def sub_service_params
    params.require(:service).permit(:name,:id, :price, :city_ids=> [])
  end
end
<file_sep>/app/helpers/partner/users_helper.rb
# View helpers for partner user pages (no helpers defined yet).
module Partner::UsersHelper
end
<file_sep>/app/views/common/_validate.js
// Validating User form on client side.
$("#new_user").validate({
  // Place each error message just before its input.
  errorPlacement: function (error, element) {
    error.insertBefore(element);
  },
  rules: {
    // Username is required, 6-20 characters.
    "user[name]": {
      required: true,
      maxlength: 20,
      minlength: 6
    }
    // (email / password / confirmation rules were disabled upstream and
    // are intentionally not re-enabled here.)
  },
  // Bug fix: jQuery Validation keys `messages` by the input's *name*
  // attribute, so the key must be "user[name]" — the original used a bare
  // `name` key, which never matched and the plugin fell back to its
  // generic default messages.
  messages: {
    "user[name]": {
      required: "User name is required.",
      maxlength: "User name must be less than 20",
      minlength: "User name must be more than 6"
    }
  }
});
// Validating the service form on the client side.
$("#new_service").validate({
  // Place each error message just before its input.
  errorPlacement: function (error, element) {
    error.insertBefore(element);
  },
  rules: {
    // Service name is required, 3-115 characters.
    "service[name]": {
      required: true,
      maxlength: 115,
      minlength: 3
    },
    "service[city_ids][]": {
      required: true
    },
    "service[photo]": {
      required: true
    }
  },
  messages: {
    "service[name]": {
      required: "Service name is required!",
      maxlength: "Service name must be less than 115 characters.",
      minlength: "Service name must be more than 3 characters."
    },
    "service[city_ids][]": {
      // Typo fix in user-facing copy: "leaset" -> "least".
      required: "At least 1 city should be selected!"
    },
    "service[photo]": {
      required: "Please upload a photo!"
    }
  }
});
// Validating the 'Sub Service' form on the client side.
// Client-side validation for the sub-service form(s).
$(".sub_service").validate({
// Place each error message just before its input.
errorPlacement: function (error, element) {
error.insertBefore(element);
},
// Validation rules, keyed by input name attribute.
rules: {
// Sub-service name is required, 3-115 characters.
"service[name]":{
required: true,
maxlength: 115,
minlength: 3
},
"service[price]":{
required: true
}
},
// Custom error messages (keys must mirror the rule keys above).
messages: {
"service[name]":{
required: "Sub service name is required!",
maxlength: "Sub service name must be less than 115 characters.",
minlength: "Sub service name must be more than 3 characters."
},
"service[price]":{
required: "Price is required!"
}
}
});
// Validating the 'address' form on the client side.
$("#address_form").validate({
//error place
errorPlacement: function (error, element) {
error.insertBefore(element);
},
//adding rule
rules: {
// name is required with max of 20 and min of 6
"address[flat_number]":{
required: true,
maxlength: 50,
minlength: 1
},
"address[street_name]":{
maxlength: 150,
minlength: 5
},
"address[pin_code]":{
required: true,
maxlength: 10,
minlength: 5
},
"address[landmark]":{
maxlength: 150,
minlength: 5
},
"country_selection":{
required: true
},
"state_selection":{
required: true
},
"city_id":{
required: true
}
},
// error messages
messages: {
"address[flat_number]":{
required: "Flat No. is required!",
maxlength: "Flat No. must be less than 50 characters.",
minlength: "Flat No. must be at-least 1 character."
},
"address[street_name]":{
maxlength: "Street name must be less than 150 characters.",
minlength: "Street name must be at-least 5 characters."
},
"address[pin_code]":{
required: "Pin code is required!",
maxlength: "Pin code must be less than 10 characters.",
minlength: "Pin code must be at-least 5 characters."
},
"address[landmark]":{
maxlength: "Landmark must be less than 150 characters.",
minlength: "Landmark must be at-least 1 character."
},
"country_selection":{
required: "Country is required!",
},
"state_selection":{
required: "State is required!",
},
"city_id":{
required: "City is required!",
},
}
}); <file_sep>/db/migrate/20180903084311_add_service_request_number_to_service_requests.rb
# Adds the human-facing "SR-<hex>" identifier column to service requests.
class AddServiceRequestNumberToServiceRequests < ActiveRecord::Migration[5.2]
def change
add_column :service_requests, :service_request_number, :string
end
end
<file_sep>/app/assets/javascripts/home.js
// Enable the "create sub service" button only when a (sub-)service has
// been chosen in any select box on the page.
$(function () {
  $('select').change(function () {
    var selection = $(this).val();
    // Empty selection -> disabled; anything else -> enabled.
    $('#sub_service_create').prop('disabled', selection == '');
  });
});
<file_sep>/app/controllers/partner/portfolios_controller.rb
# Partner-facing CRUD for the partner's own Portfolio, plus AJAX
# sub-service lookup and portfolio photo management.
class Partner::PortfoliosController < PartnerController
before_action :set_portfolio, only: [:show, :edit, :update, :destroy]
def show; end
def edit
# @portfolio = Portfolio.find(current_user.portfolio.id)
# @cities = City.details.order(:name)
# @services = Service.get_services
end
# PATCH — save edits; re-render the form on validation failure.
def update
if @portfolio.update(portfolio_params)
flash[:success] = 'Portfolio updated successfully!.'
redirect_to partner_portfolio_path(@portfolio)
else
render :edit
end
end
# NOTE(review): redirects to `portfolios_url` (no partner_ prefix) —
# confirm that route exists for partners.
def destroy
@portfolio.destroy
flash[:success] = 'Portfolio deleted successfully!.'
redirect_to portfolios_url
end
# AJAX: render the sub-services of the selected parent service.
def get_subservices
@subservices = Service.where(:parent_id => params[:parent_id])
render :partial => "subservices", :object => @subservices
end
# def get_city_service_list
# if params[:portfolio][:city_id].present?
# @city = City.find(params[:portfolio][:city_id])
# @services = @city.services
# end
# end
# Append uploaded photos to the portfolio, skipping validations.
def upload_photos
portfolio = Portfolio.find_by_id(params[:portfolio][:portfolio_id])
if params[:portfolio][:images].present?
portfolio.images += params[:portfolio][:images]
portfolio.save(validate: false)
flash[:success] = 'Photo added successfully!.'
end
redirect_to partner_portfolio_path
end
# Delete one portfolio photo by its array index.
def delete_photo
portfolio = Portfolio.find(params[:id])
remain_images = portfolio.images # NOTE(review): assumed to return a fresh array from the uploader — confirm this is not an alias of the stored value
deleted_image = remain_images.delete_at(params[:index].to_i) # drop the target image
deleted_image.try(:remove!) # delete the file from remote storage (S3)
portfolio.images = remain_images # re-assign the remaining images
portfolio.save(validate: false)
flash[:success] = 'Photo deleted successfully!.'
redirect_to partner_portfolio_path
end
private
# Use callbacks to share common setup or constraints between actions.
def set_portfolio
@portfolio = current_user.portfolio
end
# Never trust parameters from the scary internet, only allow the white list through.
def portfolio_params
params.require(:portfolio).permit(:gender, :about, :experience, :education, :avatar, :city_id, :service_id, {documents: []}, {images: []}, :company_name, :address,:company_ph_no)
end
end
<file_sep>/app/controllers/employee/employees_controller.rb
# Lets a signed-in employee edit their own basic contact details.
class Employee::EmployeesController < EmployeesController
  def edit
  end

  # PATCH — update the current user's profile; surface errors via flash.
  def update
    if current_user.update_attributes(employee_params)
      flash[:success] = "Successfully Updated!"
    else
      # Bug fix: the error branch previously read `@user`, which is never
      # assigned in this controller and would raise NoMethodError on nil.
      flash[:error] = current_user.errors.full_messages.join(",")
    end
    redirect_back fallback_location: request.referrer
  end

  private

  # Strong parameters (`:phone` was listed twice in the original).
  def employee_params
    params.require(:user).permit(:first_name, :last_name, :phone)
  end
end
<file_sep>/app/controllers/employee/service_requests_controller.rb
# Employee-facing list of assigned service requests plus accept/reject.
class Employee::ServiceRequestsController < EmployeesController
before_action :get_service_requests, only: [:index, :accept_reject]
def index
# @service_requests = current_user.service_requests.includes(:service,:address,:status, :portfolio, :time_slot).ordered.paginate(:page => params[:page], :per_page => 5)
end
# Transition a request's status (only values whitelisted in
# Status::ACTION are honoured) and notify the requester by email.
def accept_reject
service_request = ServiceRequest.find(params[:id])
if service_request
if Status::ACTION.include?(params[:value])
service_request.update_attributes(:status_id => Status.send(params[:value]).first.id)
end
UserMailer.accepted_rejected(current_user, service_request).deliver_now
end
end
private
# Requests assigned to the current employee, newest first, 5 per page.
def get_service_requests
@service_requests = current_user.assigned_service_requests.ordered.paginate(:page => params[:page], :per_page => 5).order("id DESC")
end
end
<file_sep>/app/models/status.rb
# Lookup table of service-request statuses, referenced by name.
class Status < ApplicationRecord
# Whitelist of status names that may be set via request parameters
# (see Employee::ServiceRequestsController#accept_reject).
ACTION =["accepted", "rejected", "pending", "inprogress", "completed", "incompleted",'onhold' ]
has_many :service_requests
# One finder scope per well-known status name.
scope :accepted, -> { where(name: 'Accepted') }
scope :rejected, -> { where(name: 'Rejected') }
scope :pending, -> { where(name: 'Pending') }
scope :inprogress, -> { where(name: 'Inprogress') }
scope :completed, -> { where(name: 'Completed') }
scope :incompleted, -> { where(name: 'InCompleted') }
scope :onhold, -> { where(name: 'Onhold') }
# NOTE(review): uses `.last` while the scopes above are consumed with
# `.first` elsewhere — equivalent only if status names are unique; confirm.
def self.get_status_value status
where(name: status).last
end
end
<file_sep>/app/assets/javascripts/service_requests.js
function rating_value_feed(selector,score){
$('#'+ selector).raty({readOnly: true, score: score });
}<file_sep>/app/controllers/partner_controller.rb
# Base controller for all partner-facing controllers.
class PartnerController < ActionController::Base
before_action :authorized?, except: :index
before_action :active_partner?
layout "admin"
private
# Only signed-in users with the partner role may proceed; others are
# redirected to root.
def authorized?
unless current_user && current_user.partner?
flash[:error] = "You are not authorized to view that page."
redirect_to root_path
end
end
# Warns when the partner's portfolio is inactive. NOTE(review): this only
# sets a flash and does not redirect — the action still runs; confirm
# that is intentional.
def active_partner?
unless current_user && current_user.partner? && current_user.portfolio.status
flash[:error] = "Please update your profile or contact admin for activation!."
end
end
end
<file_sep>/db/migrate/20180827092603_create_user_profiles.rb
# Creates the user_profiles table: links a user to a city and sub-service
# with experience, qualification and hourly-rate details.
class CreateUserProfiles < ActiveRecord::Migration[5.2]
def change
create_table :user_profiles do |t|
t.integer :user_id
t.integer :city_id
t.integer :sub_service_id
t.string :experience
t.string :qualification
t.float :hourly_rate
t.timestamps
end
end
end
<file_sep>/app/controllers/users/invitations_controller.rb
class Users::InvitationsController < Devise::InvitationsController
prepend_before_action :authenticate_inviter!, :only => [:new, :create]
prepend_before_action :has_invitations_left?, :only => [:create]
prepend_before_action :require_no_authentication, :only => [:edit, :update, :destroy]
prepend_before_action :resource_from_invitation_token, :only => [:edit, :destroy]
if respond_to? :helper_method
helper_method :after_sign_in_path_for
end
def new
self.resource = User.new
render :new, :layout => 'admin'
end
def create
self.resource = invite_resource
resource_invited = resource.errors.empty?
yield resource if block_given?
if resource_invited
if is_flashing_format? && self.resource.invitation_sent_at
flash[:success] = "An Email has beed sent to #{self.resource.email}"
end
redirect_to partner_dashboard_index_path
else
redirect_back fallback_location: request.referrer
end
end
def edit
set_minimum_password_length
resource.invitation_token = params[:invitation_token]
render :edit
end
def update
raw_invitation_token = update_resource_params[:invitation_token]
self.resource = accept_resource
invitation_accepted = resource.errors.empty?
yield resource if block_given?
if invitation_accepted
if Devise.allow_insecure_sign_in_after_accept
resource.add_role :employee
sign_in(resource_name, resource)
redirect_to employee_root_path
else
set_flash_message :notice, :updated_not_active if is_flashing_format?
redirect_to employee_root_path
end
else
resource.invitation_token = raw_invitation_token
render :edit
end
end
protected
def invite_resource(&block)
User.invite!(invite_params, current_user, &block)
end
def accept_resource
User.accept_invitation!(update_resource_params)
end
def invite_params
devise_parameter_sanitizer.sanitize(:invite)
end
def update_resource_params
devise_parameter_sanitizer.sanitize(:accept_invitation)
end
def translation_scope
'devise.invitations'
end
end<file_sep>/db/migrate/20180829064219_create_service_requests.rb
# Creates the service_requests table with its core foreign keys.
class CreateServiceRequests < ActiveRecord::Migration[5.2]
def change
create_table :service_requests do |t|
t.integer :status_id
t.integer :user_id
t.integer :address_id
t.integer :service_id
t.timestamps
end
end
end
<file_sep>/app/helpers/portfolios_helper.rb
# View helpers for rendering portfolio star ratings.
module PortfoliosHelper
  # Renders the shared rating partial with the portfolio's average
  # feedback score (0 when the portfolio has no feedback yet).
  def show_average_rating portfolio
    score = portfolio.feedbacks.present? ? actual_rating_count(portfolio) : 0
    render partial: "shared/show_rating" , locals: {request: portfolio, score: score }
  end

  # Integer average of all feedback ratings. Bug fix: guarded against
  # ZeroDivisionError — the original divided by `feedbacks.count`
  # unconditionally, raising when called on a portfolio with no feedback.
  def actual_rating_count portfolio
    ratings = portfolio.feedbacks.pluck(:rating)
    return 0 if ratings.empty?
    ratings.sum / ratings.size
  end
end
<file_sep>/app/controllers/users/sessions_controller.rb
# Custom Devise sessions controller: manual credential check so the
# sign-in view can display @error inline, plus role-based redirects.
class Users::SessionsController < Devise::SessionsController
before_action :configure_sign_in_params, only: [:create]
skip_before_action :authenticate_user!, only: [:create], raise: false
# GET /resource/sign_in
def new
super
end
# POST — validates credentials by hand; on success delegates to
# get_login_details, otherwise exposes @error for the view.
def create
user = User.find_for_authentication(:email => params[:user][:email])
if user.present?
if user.valid_password?(params[:user][:password])
get_login_details
else
@error = "Password Mismatch!!"
end
else
@error = "Email doesn't exist!!"
end
end
# DELETE /resource/sign_out
def destroy
super
end
protected
# Landing URL chosen by the signed-in user's role.
def after_login_path resource
if resource.admin?
admin_root_url
elsif resource.partner?
partner_portfolio_url
elsif resource.employee?
employee_root_url
else
dashboard_url
end
end
# Runs warden authentication, sets the flash, signs the user in and
# records the role-based success URL for the view/JS to redirect to.
def get_login_details
self.resource = warden.authenticate!(auth_options)
set_flash_message!(:notice, :signed_in)
sign_in(resource_name, resource)
@success_url = after_login_path resource
end
# If you have extra params to permit, append them to the sanitizer.
def configure_sign_in_params
devise_parameter_sanitizer.permit(:sign_in, keys: [:attribute])
end
end
<file_sep>/db/migrate/20180831084414_add_portfolio_id_to_service_request.rb
# Links service requests to the portfolio (partner) fulfilling them.
class AddPortfolioIdToServiceRequest < ActiveRecord::Migration[5.2]
def change
add_column :service_requests, :portfolio_id, :integer
end
end
<file_sep>/app/helpers/admin_helper.rb
# Helpers for the admin dashboard: rating display and headline counts.
module AdminHelper
  # Rating partial for a partner user's portfolio (0 when unrated).
  def get_rating_data user
    score = user.portfolio.feedbacks.present? ? actual_rating_count(user.portfolio) : 0
    render_rating_partial(user, score)
  end

  # Integer average of all feedback ratings. Bug fix: guarded against
  # ZeroDivisionError — the original divided by `feedbacks.count`
  # unconditionally, raising when a portfolio has no feedback.
  def actual_rating_count portfolio
    ratings = portfolio.feedbacks.pluck(:rating)
    return 0 if ratings.empty?
    ratings.sum / ratings.size
  end

  # Headline count for the dashboard card of the given model name.
  def get_stats object
    if object == "Service"
      object.constantize.get_services.count
    elsif object == "User"
      object.constantize.get_users(:user).count
    else
      object.constantize.count
    end
  end

  def render_rating_partial user, score
    render partial: "shared/show_rating" , locals: {request: user, score: score}
  end

  # Rating cell for a service-request row: stars when completed & rated,
  # "Not Yet Rated" when completed but unrated, "-" otherwise.
  def get_service_request_rating request
    if request.status == Status.completed.last
      request.feedback.present? ? render_rating_partial(request, request.feedback.rating) : "Not Yet Rated"
    else
      "-"
    end
  end
end
<file_sep>/db/state.rb
# Wipe the states table and reseed it from scratch.
State.delete_all
# MySQL-specific: restart the auto-increment counter so the hard-coded
# ids below line up with the primary-key sequence.
State.connection.execute("ALTER TABLE states AUTO_INCREMENT=1")
State.create(:id=>1, :name=>"Parroquia de Sant Julià de Loria",:country_id=>1)
State.create(:id=>2, :name=>"Parroquia d'Ordino",:country_id=>1)
State.create(:id=>3, :name=>"Parroquia de la Massana",:country_id=>1)
State.create(:id=>4, :name=>"Parroquia d'Encamp",:country_id=>1)
State.create(:id=>5, :name=>"Parroquia de Canillo",:country_id=>1)
State.create(:id=>6, :name=>"Parroquia d'Andorra la Vella",:country_id=>1)
State.create(:id=>7, :name=>"Parroquia d'Escaldes-Engordany",:country_id=>1)
State.create(:id=>8, :name=>"Umm al Qaywayn",:country_id=>2)
State.create(:id=>9, :name=>"Raʼs al Khaymah",:country_id=>2)
State.create(:id=>10, :name=>"Dubayy",:country_id=>2)
State.create(:id=>11, :name=>"<NAME>",:country_id=>2)
State.create(:id=>12, :name=>"Al Fujayrah",:country_id=>2)
State.create(:id=>13, :name=>"Ajman",:country_id=>2)
State.create(:id=>14, :name=>"<NAME>",:country_id=>2)
State.create(:id=>15, :name=>"Zabul",:country_id=>3)
State.create(:id=>16, :name=>"Vardak",:country_id=>3)
State.create(:id=>17, :name=>"Takhār",:country_id=>3)
State.create(:id=>18, :name=>"<NAME>",:country_id=>3)
State.create(:id=>19, :name=>"Samangān",:country_id=>3)
State.create(:id=>20, :name=>"Parvān",:country_id=>3)
State.create(:id=>21, :name=>"Paktīkā",:country_id=>3)
State.create(:id=>22, :name=>"Paktia",:country_id=>3)
State.create(:id=>23, :name=>"Orūzgān",:country_id=>3)
State.create(:id=>24, :name=>"Nīmrūz",:country_id=>3)
State.create(:id=>25, :name=>"Nangarhār",:country_id=>3)
State.create(:id=>26, :name=>"Lowgar",:country_id=>3)
State.create(:id=>27, :name=>"Laghmān",:country_id=>3)
State.create(:id=>28, :name=>"Kunduz",:country_id=>3)
State.create(:id=>29, :name=>"Konar",:country_id=>3)
State.create(:id=>30, :name=>"Kāpīsā",:country_id=>3)
State.create(:id=>31, :name=>"Kandahār",:country_id=>3)
State.create(:id=>32, :name=>"Kabul",:country_id=>3)
State.create(:id=>33, :name=>"Jowzjān",:country_id=>3)
State.create(:id=>34, :name=>"Herat",:country_id=>3)
State.create(:id=>35, :name=>"Helmand",:country_id=>3)
State.create(:id=>36, :name=>"Ghowr",:country_id=>3)
State.create(:id=>37, :name=>"Ghaznī",:country_id=>3)
State.create(:id=>38, :name=>"Faryab",:country_id=>3)
State.create(:id=>39, :name=>"Farah",:country_id=>3)
State.create(:id=>40, :name=>"Bāmīān",:country_id=>3)
State.create(:id=>41, :name=>"Balkh",:country_id=>3)
State.create(:id=>42, :name=>"Baghlān",:country_id=>3)
State.create(:id=>43, :name=>"Badghis",:country_id=>3)
State.create(:id=>44, :name=>"Badakhshan",:country_id=>3)
State.create(:id=>45, :name=>"Khowst",:country_id=>3)
State.create(:id=>46, :name=>"Nūrestān",:country_id=>3)
State.create(:id=>47, :name=>"Wilāyat-e Dāykundī",:country_id=>3)
State.create(:id=>48, :name=>"Panjshir",:country_id=>3)
State.create(:id=>49, :name=>"Saint Philip",:country_id=>4)
State.create(:id=>50, :name=>"<NAME>",:country_id=>4)
State.create(:id=>51, :name=>"<NAME>",:country_id=>4)
State.create(:id=>52, :name=>"<NAME>",:country_id=>4)
State.create(:id=>53, :name=>"<NAME>",:country_id=>4)
State.create(:id=>54, :name=>"Saint George",:country_id=>4)
State.create(:id=>55, :name=>"Redonda",:country_id=>4)
State.create(:id=>56, :name=>"Barbuda",:country_id=>4)
State.create(:id=>57, :name=>"Berat",:country_id=>6)
State.create(:id=>58, :name=>"Dibër",:country_id=>6)
State.create(:id=>59, :name=>"Elbasan",:country_id=>6)
State.create(:id=>60, :name=>"Gjirokastër",:country_id=>6)
State.create(:id=>61, :name=>"Korçë",:country_id=>6)
State.create(:id=>62, :name=>"Kukës",:country_id=>6)
State.create(:id=>63, :name=>"Durrës",:country_id=>6)
State.create(:id=>64, :name=>"Fier",:country_id=>6)
State.create(:id=>65, :name=>"Lezhë",:country_id=>6)
State.create(:id=>66, :name=>"Shkodër",:country_id=>6)
State.create(:id=>67, :name=>"Tiranë",:country_id=>6)
State.create(:id=>68, :name=>"Vlorë",:country_id=>6)
State.create(:id=>69, :name=>"Ararat",:country_id=>7)
State.create(:id=>70, :name=>"Syunikʼ",:country_id=>7)
State.create(:id=>71, :name=>"<NAME>",:country_id=>7)
State.create(:id=>72, :name=>"Yerevan",:country_id=>7)
State.create(:id=>73, :name=>"Aragatsotn",:country_id=>7)
State.create(:id=>74, :name=>"Armavir",:country_id=>7)
State.create(:id=>75, :name=>"Gegharkʼunikʼ",:country_id=>7)
State.create(:id=>76, :name=>"Kotaykʼ",:country_id=>7)
State.create(:id=>77, :name=>"Lorri",:country_id=>7)
State.create(:id=>78, :name=>"Shirak",:country_id=>7)
State.create(:id=>79, :name=>"Tavush",:country_id=>7)
State.create(:id=>80, :name=>"<NAME>",:country_id=>8)
State.create(:id=>81, :name=>"<NAME>",:country_id=>8)
State.create(:id=>82, :name=>"Moxico",:country_id=>8)
State.create(:id=>83, :name=>"<NAME>",:country_id=>8)
State.create(:id=>84, :name=>"Zaire",:country_id=>8)
State.create(:id=>85, :name=>"Uíge",:country_id=>8)
State.create(:id=>86, :name=>"Malanje",:country_id=>8)
State.create(:id=>87, :name=>"Luanda",:country_id=>8)
State.create(:id=>88, :name=>"<NAME>",:country_id=>8)
State.create(:id=>89, :name=>"Cabinda",:country_id=>8)
State.create(:id=>90, :name=>"Bengo",:country_id=>8)
State.create(:id=>91, :name=>"Namibe",:country_id=>8)
State.create(:id=>92, :name=>"Huíla",:country_id=>8)
State.create(:id=>93, :name=>"Huambo",:country_id=>8)
State.create(:id=>94, :name=>"Cunene",:country_id=>8)
State.create(:id=>95, :name=>"<NAME>",:country_id=>8)
State.create(:id=>96, :name=>"Bié",:country_id=>8)
State.create(:id=>97, :name=>"Benguela",:country_id=>8)
State.create(:id=>98, :name=>"Misiones",:country_id=>10)
State.create(:id=>99, :name=>"Formosa",:country_id=>10)
State.create(:id=>100, :name=>"Buenos Aires F.D.",:country_id=>10)
State.create(:id=>101, :name=>"Entre Ríos",:country_id=>10)
State.create(:id=>102, :name=>"Corrientes",:country_id=>10)
State.create(:id=>103, :name=>"Buenos Aires",:country_id=>10)
State.create(:id=>104, :name=>"Tucumán",:country_id=>10)
State.create(:id=>105, :name=>"Tierra del Fuego",:country_id=>10)
State.create(:id=>106, :name=>"Santiago del Estero",:country_id=>10)
State.create(:id=>107, :name=>"Santa Fe",:country_id=>10)
State.create(:id=>108, :name=>"Santa Cruz",:country_id=>10)
State.create(:id=>109, :name=>"<NAME>uis",:country_id=>10)
State.create(:id=>110, :name=>"<NAME>",:country_id=>10)
State.create(:id=>111, :name=>"Salta",:country_id=>10)
State.create(:id=>112, :name=>"<NAME>",:country_id=>10)
State.create(:id=>113, :name=>"Neuquén",:country_id=>10)
State.create(:id=>114, :name=>"Mendoza",:country_id=>10)
State.create(:id=>115, :name=>"La Rioja",:country_id=>10)
State.create(:id=>116, :name=>"La Pampa",:country_id=>10)
State.create(:id=>117, :name=>"Jujuy",:country_id=>10)
State.create(:id=>118, :name=>"Córdoba",:country_id=>10)
State.create(:id=>119, :name=>"Chubut",:country_id=>10)
State.create(:id=>120, :name=>"Chaco",:country_id=>10)
State.create(:id=>121, :name=>"Catamarca",:country_id=>10)
# Syntax fix: these rows had a stray double comma (`(:id=>122, ,:name=>`)
# which is a Ruby parse error and aborted the whole seed script.
State.create(:id=>122, :name=>"Western District",:country_id=>11)
State.create(:id=>123, :name=>"Rose Island (inactive)",:country_id=>11)
State.create(:id=>124, :name=>"Swains Island",:country_id=>11)
State.create(:id=>125, :name=>"Eastern District",:country_id=>11)
State.create(:id=>126, :name=>"Manu'a",:country_id=>11)
State.create(:id=>127, :name=>"Vienna",:country_id=>12)
State.create(:id=>128, :name=>"Vorarlberg",:country_id=>12)
State.create(:id=>129, :name=>"Tyrol",:country_id=>12)
State.create(:id=>130, :name=>"Styria",:country_id=>12)
State.create(:id=>131, :name=>"Salzburg",:country_id=>12)
State.create(:id=>132, :name=>"Upper Austria",:country_id=>12)
State.create(:id=>133, :name=>"Lower Austria",:country_id=>12)
State.create(:id=>134, :name=>"Carinthia",:country_id=>12)
State.create(:id=>135, :name=>"Burgenland",:country_id=>12)
State.create(:id=>136, :name=>"Western Australia",:country_id=>13)
State.create(:id=>137, :name=>"South Australia",:country_id=>13)
State.create(:id=>138, :name=>"Northern Territory",:country_id=>13)
State.create(:id=>139, :name=>"Victoria",:country_id=>13)
State.create(:id=>140, :name=>"Tasmania",:country_id=>13)
State.create(:id=>141, :name=>"Queensland",:country_id=>13)
State.create(:id=>142, :name=>"New South Wales",:country_id=>13)
State.create(:id=>143, :name=>"Australian Capital Territory",:country_id=>13)
# Syntax fix: these rows had a stray double comma (`(:id=>144, ,:name=>`)
# which is a Ruby parse error and aborted the whole seed script.
State.create(:id=>144, :name=>"Vårdö",:country_id=>15)
State.create(:id=>145, :name=>"Sund",:country_id=>15)
State.create(:id=>146, :name=>"Sottunga",:country_id=>15)
State.create(:id=>147, :name=>"Saltvik",:country_id=>15)
State.create(:id=>148, :name=>"Lumparland",:country_id=>15)
State.create(:id=>149, :name=>"Lemland",:country_id=>15)
State.create(:id=>150, :name=>"Kumlinge",:country_id=>15)
State.create(:id=>151, :name=>"Kökar",:country_id=>15)
State.create(:id=>152, :name=>"Föglö",:country_id=>15)
State.create(:id=>153, :name=>"Brändö",:country_id=>15)
State.create(:id=>154, :name=>"Mariehamn",:country_id=>15)
State.create(:id=>155, :name=>"Jomala",:country_id=>15)
State.create(:id=>156, :name=>"Hammarland",:country_id=>15)
State.create(:id=>157, :name=>"Geta",:country_id=>15)
State.create(:id=>158, :name=>"Finström",:country_id=>15)
State.create(:id=>159, :name=>"Eckerö",:country_id=>15)
State.create(:id=>160, :name=>"Zǝngilan",:country_id=>16)
State.create(:id=>161, :name=>"Yardımlı",:country_id=>16)
State.create(:id=>162, :name=>"Şuşa",:country_id=>16)
State.create(:id=>163, :name=>"Salyan",:country_id=>16)
State.create(:id=>164, :name=>"Sabirabad",:country_id=>16)
State.create(:id=>165, :name=>"Saatlı",:country_id=>16)
State.create(:id=>166, :name=>"Neftçala",:country_id=>16)
State.create(:id=>167, :name=>"Nakhichevan",:country_id=>16)
State.create(:id=>168, :name=>"Masallı",:country_id=>16)
State.create(:id=>169, :name=>"Lerik",:country_id=>16)
State.create(:id=>170, :name=>"Lənkəran",:country_id=>16)
State.create(:id=>171, :name=>"Laçın",:country_id=>16)
State.create(:id=>172, :name=>"Qubadlı",:country_id=>16)
State.create(:id=>173, :name=>"İmişli",:country_id=>16)
State.create(:id=>174, :name=>"Füzuli",:country_id=>16)
State.create(:id=>175, :name=>"Cǝbrayıl",:country_id=>16)
State.create(:id=>176, :name=>"Cəlilabad",:country_id=>16)
State.create(:id=>177, :name=>"Astara",:country_id=>16)
State.create(:id=>178, :name=>"Xocalı",:country_id=>16)
State.create(:id=>179, :name=>"Ağcabǝdi",:country_id=>16)
State.create(:id=>180, :name=>"Ağdam",:country_id=>16)
State.create(:id=>181, :name=>"Əli Bayramli",:country_id=>16)
State.create(:id=>182, :name=>"Lənkəran Şəhəri",:country_id=>16)
State.create(:id=>183, :name=>"Şuşa Şəhəri",:country_id=>16)
State.create(:id=>184, :name=>"Tǝrtǝr",:country_id=>16)
State.create(:id=>185, :name=>"Xankǝndi",:country_id=>16)
State.create(:id=>186, :name=>"Xocavǝnd",:country_id=>16)
State.create(:id=>187, :name=>"Zərdab",:country_id=>16)
State.create(:id=>188, :name=>"Zaqatala",:country_id=>16)
State.create(:id=>189, :name=>"Yevlax",:country_id=>16)
State.create(:id=>190, :name=>"Oğuz",:country_id=>16)
State.create(:id=>191, :name=>"Ucar",:country_id=>16)
State.create(:id=>192, :name=>"Tovuz",:country_id=>16)
State.create(:id=>193, :name=>"Şamaxı",:country_id=>16)
State.create(:id=>194, :name=>"Şǝki",:country_id=>16)
State.create(:id=>195, :name=>"Şǝmkir",:country_id=>16)
State.create(:id=>196, :name=>"Kürdǝmir",:country_id=>16)
State.create(:id=>197, :name=>"Qǝbǝlǝ",:country_id=>16)
State.create(:id=>198, :name=>"Qusar",:country_id=>16)
State.create(:id=>199, :name=>"Quba",:country_id=>16)
State.create(:id=>200, :name=>"Xanlar",:country_id=>16)
State.create(:id=>201, :name=>"Xaçmaz",:country_id=>16)
State.create(:id=>202, :name=>"Kǝlbǝcǝr",:country_id=>16)
State.create(:id=>203, :name=>"Qazax",:country_id=>16)
State.create(:id=>204, :name=>"Goranboy",:country_id=>16)
State.create(:id=>205, :name=>"Qǝx",:country_id=>16)
State.create(:id=>206, :name=>"İsmayıllı",:country_id=>16)
State.create(:id=>207, :name=>"Göyçay",:country_id=>16)
State.create(:id=>208, :name=>"Dǝvǝçi",:country_id=>16)
State.create(:id=>209, :name=>"Daşkǝsǝn",:country_id=>16)
State.create(:id=>210, :name=>"Balakǝn",:country_id=>16)
State.create(:id=>211, :name=>"Bǝrdǝ",:country_id=>16)
State.create(:id=>212, :name=>"Baki",:country_id=>16)
State.create(:id=>213, :name=>"Abşeron",:country_id=>16)
State.create(:id=>214, :name=>"Ağsu",:country_id=>16)
State.create(:id=>215, :name=>"Ağdaş",:country_id=>16)
State.create(:id=>216, :name=>"Gǝdǝbǝy",:country_id=>16)
State.create(:id=>217, :name=>"Ağstafa",:country_id=>16)
State.create(:id=>218, :name=>"Gǝncǝ",:country_id=>16)
State.create(:id=>219, :name=>"Mingǝcevir",:country_id=>16)
State.create(:id=>220, :name=>"Naftalan",:country_id=>16)
State.create(:id=>221, :name=>"Qobustan",:country_id=>16)
State.create(:id=>222, :name=>"Samux",:country_id=>16)
State.create(:id=>223, :name=>"Shaki City",:country_id=>16)
State.create(:id=>224, :name=>"Siyǝzǝn",:country_id=>16)
State.create(:id=>225, :name=>"Sumqayit",:country_id=>16)
State.create(:id=>226, :name=>"Xızı",:country_id=>16)
State.create(:id=>227, :name=>"Yevlax City",:country_id=>16)
State.create(:id=>228, :name=>"Hacıqabul",:country_id=>16)
# Syntax fix: stray double comma removed (Ruby parse error).
State.create(:id=>229, :name=>"Babek",:country_id=>16)
State.create(:id=>230, :name=>"Federation of Bosnia and Herzegovina",:country_id=>17)
State.create(:id=>231, :name=>"Republika Srpska",:country_id=>17)
# Syntax fix: stray double comma removed (Ruby parse error).
State.create(:id=>232, :name=>"Brčko",:country_id=>17)
State.create(:id=>233, :name=>"<NAME>",:country_id=>18)
State.create(:id=>234, :name=>"<NAME>",:country_id=>18)
State.create(:id=>235, :name=>"<NAME>",:country_id=>18)
State.create(:id=>236, :name=>"<NAME>",:country_id=>18)
State.create(:id=>237, :name=>"<NAME>",:country_id=>18)
State.create(:id=>238, :name=>"<NAME>",:country_id=>18)
State.create(:id=>239, :name=>"<NAME>",:country_id=>18)
State.create(:id=>240, :name=>"<NAME>",:country_id=>18)
State.create(:id=>241, :name=>"<NAME>",:country_id=>18)
State.create(:id=>242, :name=>"<NAME>",:country_id=>18)
State.create(:id=>243, :name=>"<NAME>",:country_id=>18)
State.create(:id=>244, :name=>"Rājshāhi",:country_id=>19)
State.create(:id=>245, :name=>"Dhaka",:country_id=>19)
State.create(:id=>246, :name=>"Chittagong",:country_id=>19)
State.create(:id=>247, :name=>"Khulna",:country_id=>19)
State.create(:id=>248, :name=>"Barisāl",:country_id=>19)
State.create(:id=>249, :name=>"Sylhet",:country_id=>19)
State.create(:id=>250, :name=>"Rangpur Division",:country_id=>19)
# Syntax fix: these rows had a stray double comma (`(:id=>251, ,:name=>`)
# which is a Ruby parse error and aborted the whole seed script.
State.create(:id=>251, :name=>"Brussels Capital Region",:country_id=>20)
State.create(:id=>252, :name=>"Walloon Region",:country_id=>20)
State.create(:id=>253, :name=>"Flanders",:country_id=>20)
# Seed rows for the State model, ids 254-382 (:country_id 21-31).
State.create(:id=>254, :name=>"Boucle du Mouhoun",:country_id=>21)
State.create(:id=>255, :name=>"Cascades",:country_id=>21)
State.create(:id=>256, :name=>"Centre",:country_id=>21)
State.create(:id=>257, :name=>"Centre-Est",:country_id=>21)
State.create(:id=>258, :name=>"Centre-Nord",:country_id=>21)
State.create(:id=>259, :name=>"Centre-Ouest",:country_id=>21)
State.create(:id=>260, :name=>"Centre-Sud",:country_id=>21)
State.create(:id=>261, :name=>"Est",:country_id=>21)
State.create(:id=>262, :name=>"Hauts-Bassins",:country_id=>21)
State.create(:id=>263, :name=>"Nord",:country_id=>21)
State.create(:id=>264, :name=>"Plateau-Central",:country_id=>21)
State.create(:id=>265, :name=>"Sahel",:country_id=>21)
State.create(:id=>266, :name=>"Sud-Ouest",:country_id=>21)
State.create(:id=>267, :name=>"Razgrad",:country_id=>22)
State.create(:id=>268, :name=>"Montana",:country_id=>22)
State.create(:id=>269, :name=>"Vratsa",:country_id=>22)
State.create(:id=>270, :name=>"Varna",:country_id=>22)
State.create(:id=>271, :name=>"Dobrich",:country_id=>22)
State.create(:id=>272, :name=>"Sofiya",:country_id=>22)
State.create(:id=>273, :name=>"Ruse",:country_id=>22)
State.create(:id=>274, :name=>"Plovdiv",:country_id=>22)
State.create(:id=>275, :name=>"Pleven",:country_id=>22)
State.create(:id=>276, :name=>"Pernik",:country_id=>22)
State.create(:id=>277, :name=>"Pazardzhit",:country_id=>22)
State.create(:id=>278, :name=>"Lovech",:country_id=>22)
State.create(:id=>279, :name=>"Khaskovo",:country_id=>22)
State.create(:id=>280, :name=>"<NAME>",:country_id=>22)
State.create(:id=>281, :name=>"Burgas",:country_id=>22)
State.create(:id=>282, :name=>"Blagoevgrad",:country_id=>22)
State.create(:id=>283, :name=>"Gabrovo",:country_id=>22)
State.create(:id=>284, :name=>"Kŭrdzhali",:country_id=>22)
State.create(:id=>285, :name=>"Kyustendil",:country_id=>22)
State.create(:id=>286, :name=>"Shumen",:country_id=>22)
State.create(:id=>287, :name=>"Silistra",:country_id=>22)
State.create(:id=>288, :name=>"Sliven",:country_id=>22)
State.create(:id=>289, :name=>"Smolyan",:country_id=>22)
State.create(:id=>290, :name=>"<NAME>",:country_id=>22)
State.create(:id=>291, :name=>"Tŭrgovishte",:country_id=>22)
State.create(:id=>292, :name=>"<NAME>",:country_id=>22)
State.create(:id=>293, :name=>"Vidin",:country_id=>22)
State.create(:id=>294, :name=>"Yambol",:country_id=>22)
State.create(:id=>295, :name=>"Muharraq",:country_id=>23)
State.create(:id=>296, :name=>"Capital Governorate",:country_id=>23)
State.create(:id=>297, :name=>"Southern Governorate",:country_id=>23)
State.create(:id=>298, :name=>"Central Governorate",:country_id=>23)
State.create(:id=>299, :name=>"Northern Governorate",:country_id=>23)
State.create(:id=>300, :name=>"Makamba",:country_id=>24)
State.create(:id=>301, :name=>"Bururi",:country_id=>24)
State.create(:id=>302, :name=>"Muramvya",:country_id=>24)
State.create(:id=>303, :name=>"Gitega",:country_id=>24)
State.create(:id=>304, :name=>"Ruyigi",:country_id=>24)
State.create(:id=>305, :name=>"Cankuzo",:country_id=>24)
State.create(:id=>306, :name=>"Karuzi",:country_id=>24)
State.create(:id=>307, :name=>"Bubanza",:country_id=>24)
State.create(:id=>308, :name=>"Cibitoke",:country_id=>24)
State.create(:id=>309, :name=>"Ngozi",:country_id=>24)
State.create(:id=>310, :name=>"Kayanza",:country_id=>24)
State.create(:id=>311, :name=>"Muyinga",:country_id=>24)
State.create(:id=>312, :name=>"Kirundo",:country_id=>24)
State.create(:id=>313, :name=>"Rutana",:country_id=>24)
State.create(:id=>314, :name=>"Mwaro",:country_id=>24)
State.create(:id=>315, :name=>"<NAME>",:country_id=>24)
State.create(:id=>316, :name=>"Bujumbura Rural",:country_id=>24)
State.create(:id=>317, :name=>"Zou",:country_id=>25)
State.create(:id=>318, :name=>"Quémé",:country_id=>25)
State.create(:id=>319, :name=>"Mono",:country_id=>25)
State.create(:id=>320, :name=>"Borgou",:country_id=>25)
State.create(:id=>321, :name=>"Atlantique",:country_id=>25)
State.create(:id=>322, :name=>"Atakora",:country_id=>25)
State.create(:id=>323, :name=>"Alibori",:country_id=>25)
State.create(:id=>324, :name=>"Collines",:country_id=>25)
State.create(:id=>325, :name=>"Kouffo",:country_id=>25)
State.create(:id=>326, :name=>"Donga",:country_id=>25)
State.create(:id=>327, :name=>"Littoral",:country_id=>25)
State.create(:id=>328, :name=>"Plateau",:country_id=>25)
State.create(:id=>329, :name=>"Warwick",:country_id=>27)
State.create(:id=>330, :name=>"Southampton",:country_id=>27)
State.create(:id=>331, :name=>"Smithʼs",:country_id=>27)
State.create(:id=>332, :name=>"Sandys",:country_id=>27)
State.create(:id=>333, :name=>"Saint Georgeʼs",:country_id=>27)
State.create(:id=>334, :name=>"Saint George",:country_id=>27)
State.create(:id=>335, :name=>"Pembroke",:country_id=>27)
State.create(:id=>336, :name=>"Paget",:country_id=>27)
State.create(:id=>337, :name=>"<NAME>",:country_id=>27)
State.create(:id=>338, :name=>"Hamilton city",:country_id=>27)
State.create(:id=>339, :name=>"Devonshire",:country_id=>27)
State.create(:id=>340, :name=>"Tutong",:country_id=>28)
State.create(:id=>341, :name=>"Temburong",:country_id=>28)
State.create(:id=>342, :name=>"<NAME>",:country_id=>28)
State.create(:id=>343, :name=>"Belait",:country_id=>28)
State.create(:id=>344, :name=>"Tarija",:country_id=>29)
State.create(:id=>345, :name=>"Santa Cruz",:country_id=>29)
State.create(:id=>346, :name=>"Potosí",:country_id=>29)
State.create(:id=>347, :name=>"Pando",:country_id=>29)
State.create(:id=>348, :name=>"Oruro",:country_id=>29)
State.create(:id=>349, :name=>"<NAME>",:country_id=>29)
State.create(:id=>350, :name=>"Cochabamba",:country_id=>29)
State.create(:id=>351, :name=>"Chuquisaca",:country_id=>29)
State.create(:id=>352, :name=>"<NAME>",:country_id=>29)
State.create(:id=>353, :name=>"Bonaire",:country_id=>30)
State.create(:id=>354, :name=>"Saba",:country_id=>30)
State.create(:id=>355, :name=>"<NAME>",:country_id=>30)
State.create(:id=>356, :name=>"<NAME>",:country_id=>31)
State.create(:id=>357, :name=>"Piauí",:country_id=>31)
State.create(:id=>358, :name=>"Estado de Pernambuco",:country_id=>31)
State.create(:id=>359, :name=>"Paraíba",:country_id=>31)
State.create(:id=>360, :name=>"Pará",:country_id=>31)
State.create(:id=>361, :name=>"Maranhão",:country_id=>31)
State.create(:id=>362, :name=>"Ceará",:country_id=>31)
State.create(:id=>363, :name=>"Amapá",:country_id=>31)
State.create(:id=>364, :name=>"Alagoas",:country_id=>31)
State.create(:id=>365, :name=>"Sergipe",:country_id=>31)
State.create(:id=>366, :name=>"São Paulo",:country_id=>31)
State.create(:id=>367, :name=>"Santa Catarina",:country_id=>31)
State.create(:id=>368, :name=>"Rio Grande do Sul",:country_id=>31)
State.create(:id=>369, :name=>"Rio de Janeiro",:country_id=>31)
State.create(:id=>370, :name=>"Paraná",:country_id=>31)
State.create(:id=>371, :name=>"Estado de Minas Gerais",:country_id=>31)
State.create(:id=>372, :name=>"Mato Grosso do Sul",:country_id=>31)
State.create(:id=>373, :name=>"<NAME>",:country_id=>31)
State.create(:id=>374, :name=>"Goiás",:country_id=>31)
State.create(:id=>375, :name=>"Distrito Federal",:country_id=>31)
State.create(:id=>376, :name=>"Estado de Espírito Santo",:country_id=>31)
State.create(:id=>377, :name=>"Bahia",:country_id=>31)
State.create(:id=>378, :name=>"Tocantins",:country_id=>31)
State.create(:id=>379, :name=>"Roraima",:country_id=>31)
State.create(:id=>380, :name=>"Amazonas",:country_id=>31)
State.create(:id=>381, :name=>"Acre",:country_id=>31)
State.create(:id=>382, :name=>"Rondônia",:country_id=>31)
# Seed rows for the State model, ids 383-524 (:country_id 32-43).
State.create(:id=>383, :name=>"San Salvador and Rum Cay",:country_id=>32)
State.create(:id=>384, :name=>"Sandy Point",:country_id=>32)
State.create(:id=>385, :name=>"Rock Sound",:country_id=>32)
State.create(:id=>386, :name=>"Ragged Island",:country_id=>32)
State.create(:id=>387, :name=>"Nichollstown and Berry Islands",:country_id=>32)
State.create(:id=>388, :name=>"New Providence",:country_id=>32)
State.create(:id=>389, :name=>"Mayaguana",:country_id=>32)
State.create(:id=>390, :name=>"Marsh Harbour",:country_id=>32)
State.create(:id=>391, :name=>"Long Island",:country_id=>32)
State.create(:id=>392, :name=>"Kemps Bay",:country_id=>32)
State.create(:id=>393, :name=>"Inagua",:country_id=>32)
State.create(:id=>394, :name=>"High Rock",:country_id=>32)
State.create(:id=>395, :name=>"Harbour Island",:country_id=>32)
State.create(:id=>396, :name=>"Green Turtle Cay",:country_id=>32)
State.create(:id=>397, :name=>"Governorʼs Harbour",:country_id=>32)
State.create(:id=>398, :name=>"Fresh Creek",:country_id=>32)
State.create(:id=>399, :name=>"Freeport",:country_id=>32)
State.create(:id=>400, :name=>"Exuma",:country_id=>32)
State.create(:id=>401, :name=>"Cat Island",:country_id=>32)
State.create(:id=>402, :name=>"Bimini",:country_id=>32)
State.create(:id=>403, :name=>"Acklins and Crooked Islands",:country_id=>32)
State.create(:id=>404, :name=>"Bumthang",:country_id=>33)
State.create(:id=>405, :name=>"Chhukha",:country_id=>33)
State.create(:id=>406, :name=>"Daga",:country_id=>33)
State.create(:id=>407, :name=>"Chirang",:country_id=>33)
State.create(:id=>408, :name=>"Geylegphug",:country_id=>33)
State.create(:id=>409, :name=>"Ha",:country_id=>33)
State.create(:id=>410, :name=>"Lhuntshi",:country_id=>33)
State.create(:id=>411, :name=>"Mongar",:country_id=>33)
State.create(:id=>412, :name=>"Paro",:country_id=>33)
State.create(:id=>413, :name=>"Pemagatsel",:country_id=>33)
State.create(:id=>414, :name=>"Punakha",:country_id=>33)
State.create(:id=>415, :name=>"Samchi",:country_id=>33)
State.create(:id=>416, :name=>"<NAME>",:country_id=>33)
State.create(:id=>417, :name=>"Shemgang",:country_id=>33)
State.create(:id=>418, :name=>"Tashigang",:country_id=>33)
State.create(:id=>419, :name=>"Thimphu",:country_id=>33)
State.create(:id=>420, :name=>"Tongsa",:country_id=>33)
State.create(:id=>421, :name=>"<NAME>",:country_id=>33)
State.create(:id=>422, :name=>"Gasa",:country_id=>33)
State.create(:id=>423, :name=>"<NAME>",:country_id=>33)
State.create(:id=>424, :name=>"Southern",:country_id=>35)
State.create(:id=>425, :name=>"South East",:country_id=>35)
State.create(:id=>426, :name=>"North East",:country_id=>35)
State.create(:id=>427, :name=>"North West",:country_id=>35)
State.create(:id=>428, :name=>"Kweneng",:country_id=>35)
State.create(:id=>429, :name=>"Kgatleng",:country_id=>35)
State.create(:id=>430, :name=>"Kgalagadi",:country_id=>35)
State.create(:id=>431, :name=>"Ghanzi",:country_id=>35)
State.create(:id=>432, :name=>"Central",:country_id=>35)
State.create(:id=>433, :name=>"Vitsyebskaya Voblastsʼ",:country_id=>36)
State.create(:id=>434, :name=>"Mahilyowskaya Voblastsʼ",:country_id=>36)
State.create(:id=>435, :name=>"Minskaya Voblastsʼ",:country_id=>36)
State.create(:id=>436, :name=>"Horad Minsk",:country_id=>36)
State.create(:id=>437, :name=>"Hrodzyenskaya Voblastsʼ",:country_id=>36)
State.create(:id=>438, :name=>"Homyelʼskaya Voblastsʼ",:country_id=>36)
State.create(:id=>439, :name=>"Brestskaya Voblastsʼ",:country_id=>36)
State.create(:id=>440, :name=>"Toledo",:country_id=>37)
State.create(:id=>441, :name=>"Stann Creek",:country_id=>37)
State.create(:id=>442, :name=>"Orange Walk",:country_id=>37)
State.create(:id=>443, :name=>"Corozal",:country_id=>37)
State.create(:id=>444, :name=>"Cayo",:country_id=>37)
State.create(:id=>445, :name=>"Belize",:country_id=>37)
State.create(:id=>446, :name=>"Alberta",:country_id=>38)
State.create(:id=>447, :name=>"British Columbia",:country_id=>38)
State.create(:id=>448, :name=>"Manitoba",:country_id=>38)
State.create(:id=>449, :name=>"New Brunswick",:country_id=>38)
State.create(:id=>450, :name=>"Northwest Territories",:country_id=>38)
State.create(:id=>451, :name=>"Nova Scotia",:country_id=>38)
State.create(:id=>452, :name=>"Nunavut",:country_id=>38)
State.create(:id=>453, :name=>"Ontario",:country_id=>38)
State.create(:id=>454, :name=>"Prince Edward Island",:country_id=>38)
State.create(:id=>455, :name=>"Quebec",:country_id=>38)
State.create(:id=>456, :name=>"Saskatchewan",:country_id=>38)
State.create(:id=>457, :name=>"Yukon",:country_id=>38)
State.create(:id=>458, :name=>"Newfoundland and Labrador",:country_id=>38)
State.create(:id=>459, :name=>"Sud-Kivu",:country_id=>40)
State.create(:id=>460, :name=>"Katanga",:country_id=>40)
State.create(:id=>461, :name=>"Nord-Kivu",:country_id=>40)
State.create(:id=>462, :name=>"Maniema",:country_id=>40)
State.create(:id=>463, :name=>"Kasaï-Oriental",:country_id=>40)
State.create(:id=>464, :name=>"Kasaï-Occidental",:country_id=>40)
State.create(:id=>465, :name=>"Orientale",:country_id=>40)
State.create(:id=>466, :name=>"Équateur",:country_id=>40)
State.create(:id=>467, :name=>"Kinshasa",:country_id=>40)
State.create(:id=>468, :name=>"Bas-Congo",:country_id=>40)
State.create(:id=>469, :name=>"Bandundu",:country_id=>40)
State.create(:id=>470, :name=>"Vakaga",:country_id=>41)
State.create(:id=>471, :name=>"Ouaka",:country_id=>41)
State.create(:id=>472, :name=>"Mbomou",:country_id=>41)
State.create(:id=>473, :name=>"Haut-Mbomou",:country_id=>41)
State.create(:id=>474, :name=>"Haute-Kotto",:country_id=>41)
State.create(:id=>475, :name=>"Basse-Kotto",:country_id=>41)
State.create(:id=>476, :name=>"Bamingui-Bangoran",:country_id=>41)
State.create(:id=>477, :name=>"Sangha-Mbaéré",:country_id=>41)
State.create(:id=>478, :name=>"Ouham-Pendé",:country_id=>41)
State.create(:id=>479, :name=>"Ouham",:country_id=>41)
State.create(:id=>480, :name=>"Ombella-Mpoko",:country_id=>41)
State.create(:id=>481, :name=>"Nana-Mambéré",:country_id=>41)
State.create(:id=>482, :name=>"Lobaye",:country_id=>41)
State.create(:id=>483, :name=>"Kémo",:country_id=>41)
State.create(:id=>484, :name=>"Mambéré-Kadéï",:country_id=>41)
State.create(:id=>485, :name=>"Nana-Grébizi",:country_id=>41)
State.create(:id=>486, :name=>"Bangui",:country_id=>41)
State.create(:id=>487, :name=>"Sangha",:country_id=>42)
State.create(:id=>488, :name=>"Pool",:country_id=>42)
State.create(:id=>489, :name=>"Plateaux",:country_id=>42)
State.create(:id=>490, :name=>"Niari",:country_id=>42)
State.create(:id=>491, :name=>"Likouala",:country_id=>42)
State.create(:id=>492, :name=>"Lékoumou",:country_id=>42)
State.create(:id=>493, :name=>"Kouilou",:country_id=>42)
State.create(:id=>494, :name=>"Cuvette",:country_id=>42)
State.create(:id=>495, :name=>"Bouenza",:country_id=>42)
State.create(:id=>496, :name=>"Brazzaville",:country_id=>42)
State.create(:id=>497, :name=>"Cuvette-Ouest",:country_id=>42)
State.create(:id=>498, :name=>"Pointe-Noire",:country_id=>42)
State.create(:id=>499, :name=>"Zürich",:country_id=>43)
State.create(:id=>500, :name=>"Zug",:country_id=>43)
State.create(:id=>501, :name=>"Vaud",:country_id=>43)
State.create(:id=>502, :name=>"Valais",:country_id=>43)
State.create(:id=>503, :name=>"Uri",:country_id=>43)
State.create(:id=>504, :name=>"Ticino",:country_id=>43)
State.create(:id=>505, :name=>"Thurgau",:country_id=>43)
State.create(:id=>506, :name=>"Solothurn",:country_id=>43)
State.create(:id=>507, :name=>"Schwyz",:country_id=>43)
State.create(:id=>508, :name=>"Schaffhausen",:country_id=>43)
State.create(:id=>509, :name=>"<NAME>",:country_id=>43)
State.create(:id=>510, :name=>"Obwalden",:country_id=>43)
State.create(:id=>511, :name=>"Nidwalden",:country_id=>43)
State.create(:id=>512, :name=>"Neuchâtel",:country_id=>43)
State.create(:id=>513, :name=>"Luzern",:country_id=>43)
State.create(:id=>514, :name=>"Jura",:country_id=>43)
State.create(:id=>515, :name=>"Graubünden",:country_id=>43)
State.create(:id=>516, :name=>"Glarus",:country_id=>43)
State.create(:id=>517, :name=>"Genève",:country_id=>43)
State.create(:id=>518, :name=>"Fribourg",:country_id=>43)
State.create(:id=>519, :name=>"Bern",:country_id=>43)
State.create(:id=>520, :name=>"Basel-City",:country_id=>43)
State.create(:id=>521, :name=>"Basel-Landschaft",:country_id=>43)
State.create(:id=>522, :name=>"Appenzell Ausserrhoden",:country_id=>43)
State.create(:id=>523, :name=>"<NAME>",:country_id=>43)
State.create(:id=>524, :name=>"Aargau",:country_id=>43)
# Seed rows for the State model, ids 525-677 (:country_id 44-52).
State.create(:id=>525, :name=>"Lagunes",:country_id=>44)
State.create(:id=>526, :name=>"Sud-Comoé",:country_id=>44)
State.create(:id=>527, :name=>"Agnéby",:country_id=>44)
State.create(:id=>528, :name=>"Haut-Sassandra",:country_id=>44)
State.create(:id=>529, :name=>"Savanes",:country_id=>44)
State.create(:id=>530, :name=>"<NAME>",:country_id=>44)
State.create(:id=>531, :name=>"Moyen-Comoé",:country_id=>44)
State.create(:id=>532, :name=>"<NAME>",:country_id=>44)
State.create(:id=>533, :name=>"Lacs",:country_id=>44)
State.create(:id=>534, :name=>"Zanzan",:country_id=>44)
State.create(:id=>535, :name=>"Bas-Sassandra",:country_id=>44)
State.create(:id=>536, :name=>"Worodougou",:country_id=>44)
State.create(:id=>537, :name=>"Denguélé",:country_id=>44)
State.create(:id=>538, :name=>"Sud-Bandama",:country_id=>44)
State.create(:id=>539, :name=>"Fromager",:country_id=>44)
State.create(:id=>540, :name=>"Nʼzi-Comoé",:country_id=>44)
State.create(:id=>541, :name=>"Marahoué",:country_id=>44)
State.create(:id=>542, :name=>"Moyen-Cavally",:country_id=>44)
State.create(:id=>543, :name=>"Bafing",:country_id=>44)
State.create(:id=>544, :name=>"Valparaíso",:country_id=>46)
State.create(:id=>545, :name=>"Tarapacá",:country_id=>46)
State.create(:id=>546, :name=>"Región Metropolitana",:country_id=>46)
State.create(:id=>547, :name=>"Maule",:country_id=>46)
State.create(:id=>548, :name=>"Los Lagos",:country_id=>46)
State.create(:id=>549, :name=>"Libertador General <NAME>",:country_id=>46)
State.create(:id=>550, :name=>"Coquimbo",:country_id=>46)
State.create(:id=>551, :name=>"Bío-Bío",:country_id=>46)
State.create(:id=>552, :name=>"Atacama",:country_id=>46)
State.create(:id=>553, :name=>"Araucanía",:country_id=>46)
State.create(:id=>554, :name=>"Antofagasta",:country_id=>46)
State.create(:id=>555, :name=>"<NAME> <NAME>",:country_id=>46)
State.create(:id=>556, :name=>"Magallanes y Antá<NAME>",:country_id=>46)
State.create(:id=>557, :name=>"Región de Arica y Parinacota",:country_id=>46)
State.create(:id=>558, :name=>"Región de Los Ríos",:country_id=>46)
State.create(:id=>559, :name=>"South-West Province",:country_id=>47)
State.create(:id=>560, :name=>"South Region",:country_id=>47)
State.create(:id=>561, :name=>"West Region",:country_id=>47)
State.create(:id=>562, :name=>"North-West Region",:country_id=>47)
State.create(:id=>563, :name=>"North Province",:country_id=>47)
State.create(:id=>564, :name=>"Littoral Region",:country_id=>47)
State.create(:id=>565, :name=>"Far North Region",:country_id=>47)
State.create(:id=>566, :name=>"East Province",:country_id=>47)
State.create(:id=>567, :name=>"Centre Region",:country_id=>47)
State.create(:id=>568, :name=>"Adamaoua Region",:country_id=>47)
State.create(:id=>569, :name=>"Tibet Autonomous Region",:country_id=>48)
State.create(:id=>570, :name=>"<NAME>",:country_id=>48)
State.create(:id=>571, :name=>"<NAME>",:country_id=>48)
State.create(:id=>572, :name=>"<NAME>",:country_id=>48)
State.create(:id=>573, :name=>"Yunnan",:country_id=>48)
State.create(:id=>574, :name=>"<NAME>",:country_id=>48)
State.create(:id=>575, :name=>"Sichuan",:country_id=>48)
State.create(:id=>576, :name=>"<NAME>",:country_id=>48)
State.create(:id=>577, :name=>"<NAME>",:country_id=>48)
State.create(:id=>578, :name=>"<NAME>",:country_id=>48)
State.create(:id=>579, :name=>"<NAME>",:country_id=>48)
State.create(:id=>580, :name=>"<NAME>",:country_id=>48)
State.create(:id=>581, :name=>"Jiangxi",:country_id=>48)
State.create(:id=>582, :name=>"<NAME>",:country_id=>48)
State.create(:id=>583, :name=>"Hunan",:country_id=>48)
State.create(:id=>584, :name=>"Hubei",:country_id=>48)
State.create(:id=>585, :name=>"<NAME>",:country_id=>48)
State.create(:id=>586, :name=>"<NAME>",:country_id=>48)
State.create(:id=>587, :name=>"Hainan",:country_id=>48)
State.create(:id=>588, :name=>"<NAME>",:country_id=>48)
State.create(:id=>589, :name=>"<NAME>",:country_id=>48)
State.create(:id=>590, :name=>"<NAME>",:country_id=>48)
State.create(:id=>591, :name=>"<NAME>",:country_id=>48)
State.create(:id=>592, :name=>"Fujian",:country_id=>48)
State.create(:id=>593, :name=>"<NAME>",:country_id=>48)
State.create(:id=>594, :name=>"<NAME>",:country_id=>48)
State.create(:id=>595, :name=>"Inner Mongolia",:country_id=>48)
State.create(:id=>596, :name=>"Liaoning",:country_id=>48)
State.create(:id=>597, :name=>"<NAME>",:country_id=>48)
State.create(:id=>598, :name=>"<NAME>",:country_id=>48)
State.create(:id=>599, :name=>"Beijing",:country_id=>48)
State.create(:id=>600, :name=>"Vichada",:country_id=>49)
State.create(:id=>601, :name=>"Vaupés",:country_id=>49)
State.create(:id=>602, :name=>"<NAME>",:country_id=>49)
State.create(:id=>603, :name=>"Tolima",:country_id=>49)
State.create(:id=>604, :name=>"Sucre",:country_id=>49)
State.create(:id=>605, :name=>"Santander",:country_id=>49)
State.create(:id=>606, :name=>"Archipiélago de San Andrés, Providencia y Santa Catalina",:country_id=>49)
State.create(:id=>607, :name=>"Risaralda",:country_id=>49)
State.create(:id=>608, :name=>"Quindío",:country_id=>49)
State.create(:id=>609, :name=>"Putumayo",:country_id=>49)
State.create(:id=>610, :name=>"<NAME>",:country_id=>49)
State.create(:id=>611, :name=>"Nariño",:country_id=>49)
State.create(:id=>612, :name=>"Meta",:country_id=>49)
State.create(:id=>613, :name=>"Magdalena",:country_id=>49)
State.create(:id=>614, :name=>"<NAME>",:country_id=>49)
State.create(:id=>615, :name=>"Huila",:country_id=>49)
State.create(:id=>616, :name=>"Guaviare",:country_id=>49)
State.create(:id=>617, :name=>"Guainía",:country_id=>49)
State.create(:id=>618, :name=>"Cundinamarca",:country_id=>49)
State.create(:id=>619, :name=>"Córdoba",:country_id=>49)
State.create(:id=>620, :name=>"Chocó",:country_id=>49)
State.create(:id=>621, :name=>"Cesar",:country_id=>49)
State.create(:id=>622, :name=>"Cauca",:country_id=>49)
State.create(:id=>623, :name=>"Casanare",:country_id=>49)
State.create(:id=>624, :name=>"Caquetá",:country_id=>49)
State.create(:id=>625, :name=>"Caldas",:country_id=>49)
State.create(:id=>626, :name=>"Boyacá",:country_id=>49)
State.create(:id=>627, :name=>"Bolívar",:country_id=>49)
State.create(:id=>628, :name=>"Bogota D.C.",:country_id=>49)
State.create(:id=>629, :name=>"Atlántico",:country_id=>49)
State.create(:id=>630, :name=>"Arauca",:country_id=>49)
State.create(:id=>631, :name=>"Antioquia",:country_id=>49)
State.create(:id=>632, :name=>"Amazonas",:country_id=>49)
State.create(:id=>633, :name=>"San José",:country_id=>50)
State.create(:id=>634, :name=>"Puntarenas",:country_id=>50)
State.create(:id=>635, :name=>"Limón",:country_id=>50)
State.create(:id=>636, :name=>"Heredia",:country_id=>50)
State.create(:id=>637, :name=>"Guanacaste",:country_id=>50)
State.create(:id=>638, :name=>"Cartago",:country_id=>50)
State.create(:id=>639, :name=>"Alajuela",:country_id=>50)
State.create(:id=>640, :name=>"Villa Clara",:country_id=>51)
State.create(:id=>641, :name=>"Santiago de Cuba",:country_id=>51)
State.create(:id=>642, :name=>"Sancti Spíritus",:country_id=>51)
State.create(:id=>643, :name=>"Pinar del Río",:country_id=>51)
State.create(:id=>644, :name=>"Matanzas",:country_id=>51)
State.create(:id=>645, :name=>"Las Tunas",:country_id=>51)
State.create(:id=>646, :name=>"Isla de la Juventud",:country_id=>51)
State.create(:id=>647, :name=>"Holguín",:country_id=>51)
State.create(:id=>648, :name=>"Guantánamo",:country_id=>51)
State.create(:id=>649, :name=>"Granma",:country_id=>51)
State.create(:id=>650, :name=>"Ciudad de La Habana",:country_id=>51)
State.create(:id=>651, :name=>"Cienfuegos",:country_id=>51)
State.create(:id=>652, :name=>"<NAME>",:country_id=>51)
State.create(:id=>653, :name=>"Camagüey",:country_id=>51)
State.create(:id=>654, :name=>"<NAME>",:country_id=>51)
State.create(:id=>655, :name=>"<NAME>",:country_id=>51)
State.create(:id=>656, :name=>"Tarrafal",:country_id=>52)
State.create(:id=>657, :name=>"São Vicente",:country_id=>52)
State.create(:id=>658, :name=>"Santa Catarina",:country_id=>52)
State.create(:id=>659, :name=>"Sal",:country_id=>52)
State.create(:id=>660, :name=>"<NAME>",:country_id=>52)
State.create(:id=>661, :name=>"Praia",:country_id=>52)
State.create(:id=>662, :name=>"Paul",:country_id=>52)
State.create(:id=>663, :name=>"Maio",:country_id=>52)
State.create(:id=>664, :name=>"Brava",:country_id=>52)
State.create(:id=>665, :name=>"Boa Vista",:country_id=>52)
State.create(:id=>666, :name=>"Mosteiros",:country_id=>52)
State.create(:id=>667, :name=>"Santa Cruz",:country_id=>52)
State.create(:id=>668, :name=>"São Domingos",:country_id=>52)
State.create(:id=>669, :name=>"São Filipe",:country_id=>52)
State.create(:id=>670, :name=>"São Miguel",:country_id=>52)
State.create(:id=>671, :name=>"Concelho do Porto Novo",:country_id=>52)
State.create(:id=>672, :name=>"<NAME>",:country_id=>52)
State.create(:id=>673, :name=>"Concelho de Santa Catarina do Fogo",:country_id=>52)
State.create(:id=>674, :name=>"Concelho de São Salvador do Mundo",:country_id=>52)
State.create(:id=>675, :name=>"<NAME>",:country_id=>52)
State.create(:id=>676, :name=>"Concelho de <NAME> dos Orgãos",:country_id=>52)
State.create(:id=>677, :name=>"<NAME>",:country_id=>52)
# Seed rows for the State model, ids 678-766 (:country_id 55-61).
State.create(:id=>678, :name=>"Paphos",:country_id=>55)
State.create(:id=>679, :name=>"Nicosia",:country_id=>55)
State.create(:id=>680, :name=>"Limassol",:country_id=>55)
State.create(:id=>681, :name=>"Larnaca",:country_id=>55)
State.create(:id=>682, :name=>"Kyrenia",:country_id=>55)
State.create(:id=>683, :name=>"Famagusta",:country_id=>55)
State.create(:id=>684, :name=>"Hlavní Mesto Praha",:country_id=>56)
State.create(:id=>685, :name=>"South Moravian Region",:country_id=>56)
State.create(:id=>686, :name=>"Jihočeský Kraj",:country_id=>56)
State.create(:id=>687, :name=>"Vysočina",:country_id=>56)
State.create(:id=>688, :name=>"Karlovarský Kraj",:country_id=>56)
State.create(:id=>689, :name=>"Královéhradecký Kraj",:country_id=>56)
State.create(:id=>690, :name=>"Liberecký Kraj",:country_id=>56)
State.create(:id=>691, :name=>"Olomoucký Kraj",:country_id=>56)
State.create(:id=>692, :name=>"Moravskoslezský Kraj",:country_id=>56)
State.create(:id=>693, :name=>"Pardubický Kraj",:country_id=>56)
State.create(:id=>694, :name=>"Plzeňský Kraj",:country_id=>56)
State.create(:id=>695, :name=>"Středočeský Kraj",:country_id=>56)
State.create(:id=>696, :name=>"Ústecký Kraj",:country_id=>56)
State.create(:id=>697, :name=>"Zlínský Kraj",:country_id=>56)
State.create(:id=>698, :name=>"Thuringia",:country_id=>57)
State.create(:id=>699, :name=>"Schleswig-Holstein",:country_id=>57)
State.create(:id=>700, :name=>"Saxony-Anhalt",:country_id=>57)
State.create(:id=>701, :name=>"Saxony",:country_id=>57)
State.create(:id=>702, :name=>"Saarland",:country_id=>57)
State.create(:id=>703, :name=>"Rhineland-Palatinate",:country_id=>57)
State.create(:id=>704, :name=>"North Rhine-Westphalia",:country_id=>57)
State.create(:id=>705, :name=>"Lower Saxony",:country_id=>57)
State.create(:id=>706, :name=>"Mecklenburg-Vorpommern",:country_id=>57)
State.create(:id=>707, :name=>"Hesse",:country_id=>57)
State.create(:id=>708, :name=>"Hamburg",:country_id=>57)
State.create(:id=>709, :name=>"Bremen",:country_id=>57)
State.create(:id=>710, :name=>"Brandenburg",:country_id=>57)
State.create(:id=>711, :name=>"Berlin",:country_id=>57)
State.create(:id=>712, :name=>"Bavaria",:country_id=>57)
State.create(:id=>713, :name=>"Baden-Württemberg",:country_id=>57)
State.create(:id=>714, :name=>"Tadjourah",:country_id=>58)
State.create(:id=>715, :name=>"Obock",:country_id=>58)
State.create(:id=>716, :name=>"Djibouti",:country_id=>58)
State.create(:id=>717, :name=>"Dikhil",:country_id=>58)
State.create(:id=>718, :name=>"<NAME>",:country_id=>58)
State.create(:id=>719, :name=>"Arta",:country_id=>58)
State.create(:id=>720, :name=>"Capital Region",:country_id=>59)
State.create(:id=>721, :name=>"Central Jutland",:country_id=>59)
State.create(:id=>722, :name=>"Region North Jutland",:country_id=>59)
State.create(:id=>723, :name=>"Zealand",:country_id=>59)
State.create(:id=>724, :name=>"South Denmark",:country_id=>59)
State.create(:id=>725, :name=>"<NAME>",:country_id=>60)
State.create(:id=>726, :name=>"<NAME>",:country_id=>60)
State.create(:id=>727, :name=>"Saint Patrick",:country_id=>60)
State.create(:id=>728, :name=>"Saint Mark",:country_id=>60)
State.create(:id=>729, :name=>"<NAME>",:country_id=>60)
State.create(:id=>730, :name=>"Saint Joseph",:country_id=>60)
State.create(:id=>731, :name=>"<NAME>",:country_id=>60)
State.create(:id=>732, :name=>"Saint George",:country_id=>60)
State.create(:id=>733, :name=>"<NAME>",:country_id=>60)
State.create(:id=>734, :name=>"<NAME>",:country_id=>60)
State.create(:id=>735, :name=>"Valverde",:country_id=>61)
State.create(:id=>736, :name=>"<NAME>",:country_id=>61)
State.create(:id=>737, :name=>"Santiago",:country_id=>61)
State.create(:id=>738, :name=>"<NAME>",:country_id=>61)
State.create(:id=>739, :name=>"<NAME>",:country_id=>61)
State.create(:id=>740, :name=>"<NAME>",:country_id=>61)
State.create(:id=>741, :name=>"<NAME>",:country_id=>61)
State.create(:id=>742, :name=>"Samaná",:country_id=>61)
State.create(:id=>743, :name=>"<NAME>",:country_id=>61)
State.create(:id=>744, :name=>"<NAME>",:country_id=>61)
State.create(:id=>745, :name=>"Peravia",:country_id=>61)
State.create(:id=>746, :name=>"Pedernales",:country_id=>61)
State.create(:id=>747, :name=>"Distrito Nacional",:country_id=>61)
State.create(:id=>748, :name=>"<NAME>",:country_id=>61)
State.create(:id=>749, :name=>"<NAME>",:country_id=>61)
State.create(:id=>750, :name=>"<NAME>",:country_id=>61)
State.create(:id=>751, :name=>"<NAME>",:country_id=>61)
State.create(:id=>752, :name=>"<NAME>",:country_id=>61)
State.create(:id=>753, :name=>"La Romana",:country_id=>61)
State.create(:id=>754, :name=>"La Altagracia",:country_id=>61)
State.create(:id=>755, :name=>"Independencia",:country_id=>61)
State.create(:id=>756, :name=>"<NAME>",:country_id=>61)
State.create(:id=>757, :name=>"Espaillat",:country_id=>61)
State.create(:id=>758, :name=>"<NAME>",:country_id=>61)
State.create(:id=>759, :name=>"<NAME>",:country_id=>61)
State.create(:id=>760, :name=>"Duarte",:country_id=>61)
State.create(:id=>761, :name=>"Dajabón",:country_id=>61)
State.create(:id=>762, :name=>"Barahona",:country_id=>61)
State.create(:id=>763, :name=>"Baoruco",:country_id=>61)
State.create(:id=>764, :name=>"Azua",:country_id=>61)
State.create(:id=>765, :name=>"<NAME>",:country_id=>61)
State.create(:id=>766, :name=>"<NAME>",:country_id=>61)
# Seed rows for the State model, ids 767-847 (:country_id 62-64;
# the country 64 group continues past the end of this chunk).
State.create(:id=>767, :name=>"Tlemcen",:country_id=>62)
State.create(:id=>768, :name=>"<NAME>",:country_id=>62)
State.create(:id=>769, :name=>"Tissemsilt",:country_id=>62)
State.create(:id=>770, :name=>"Tipaza",:country_id=>62)
State.create(:id=>771, :name=>"Tindouf",:country_id=>62)
State.create(:id=>772, :name=>"Tiaret",:country_id=>62)
State.create(:id=>773, :name=>"Tébessa",:country_id=>62)
State.create(:id=>774, :name=>"Tamanghasset",:country_id=>62)
State.create(:id=>775, :name=>"<NAME>",:country_id=>62)
State.create(:id=>776, :name=>"Skikda",:country_id=>62)
State.create(:id=>777, :name=>"<NAME>",:country_id=>62)
State.create(:id=>778, :name=>"Sétif",:country_id=>62)
State.create(:id=>779, :name=>"Saïda",:country_id=>62)
State.create(:id=>780, :name=>"Relizane",:country_id=>62)
State.create(:id=>781, :name=>"<NAME>",:country_id=>62)
State.create(:id=>782, :name=>"Ouargla",:country_id=>62)
State.create(:id=>783, :name=>"Oran",:country_id=>62)
# NOTE(review): unlike every other row, this value embeds the Arabic name
# alongside the Latin one ("Naama النعامة") -- verify against the source
# dataset whether the Arabic suffix is intentional or should be dropped.
State.create(:id=>784, :name=>"Naama النعامة",:country_id=>62)
State.create(:id=>785, :name=>"Mʼsila",:country_id=>62)
State.create(:id=>786, :name=>"Mostaganem",:country_id=>62)
State.create(:id=>787, :name=>"Mila",:country_id=>62)
State.create(:id=>788, :name=>"Médéa",:country_id=>62)
State.create(:id=>789, :name=>"Mascara",:country_id=>62)
State.create(:id=>790, :name=>"Laghouat",:country_id=>62)
State.create(:id=>791, :name=>"Khenchela",:country_id=>62)
State.create(:id=>792, :name=>"Jijel",:country_id=>62)
State.create(:id=>793, :name=>"Illizi",:country_id=>62)
State.create(:id=>794, :name=>"Guelma",:country_id=>62)
State.create(:id=>795, :name=>"Ghardaïa",:country_id=>62)
State.create(:id=>796, :name=>"<NAME>",:country_id=>62)
State.create(:id=>797, :name=>"<NAME>",:country_id=>62)
State.create(:id=>798, :name=>"<NAME>",:country_id=>62)
State.create(:id=>799, :name=>"Djelfa",:country_id=>62)
State.create(:id=>800, :name=>"Constantine",:country_id=>62)
State.create(:id=>801, :name=>"Chlef",:country_id=>62)
State.create(:id=>802, :name=>"Boumerdes",:country_id=>62)
State.create(:id=>803, :name=>"Bouira",:country_id=>62)
State.create(:id=>804, :name=>"<NAME>",:country_id=>62)
State.create(:id=>805, :name=>"Blida",:country_id=>62)
State.create(:id=>806, :name=>"Biskra",:country_id=>62)
State.create(:id=>807, :name=>"Bejaïa",:country_id=>62)
State.create(:id=>808, :name=>"Béchar",:country_id=>62)
State.create(:id=>809, :name=>"Batna",:country_id=>62)
State.create(:id=>810, :name=>"Annaba",:country_id=>62)
State.create(:id=>811, :name=>"Alger",:country_id=>62)
State.create(:id=>812, :name=>"<NAME>",:country_id=>62)
State.create(:id=>813, :name=>"<NAME>",:country_id=>62)
State.create(:id=>814, :name=>"Adrar",:country_id=>62)
State.create(:id=>815, :name=>"Zamora-Chinchipe",:country_id=>63)
State.create(:id=>816, :name=>"Tungurahua",:country_id=>63)
State.create(:id=>817, :name=>"Pichincha",:country_id=>63)
State.create(:id=>818, :name=>"Pastaza",:country_id=>63)
State.create(:id=>819, :name=>"Napo",:country_id=>63)
State.create(:id=>820, :name=>"Morona-Santiago",:country_id=>63)
State.create(:id=>821, :name=>"Manabí",:country_id=>63)
State.create(:id=>822, :name=>"<NAME>",:country_id=>63)
State.create(:id=>823, :name=>"Loja",:country_id=>63)
State.create(:id=>824, :name=>"Imbabura",:country_id=>63)
State.create(:id=>825, :name=>"Guayas",:country_id=>63)
State.create(:id=>826, :name=>"Galápagos",:country_id=>63)
State.create(:id=>827, :name=>"Esmeraldas",:country_id=>63)
State.create(:id=>828, :name=>"<NAME>",:country_id=>63)
State.create(:id=>829, :name=>"Cotopaxi",:country_id=>63)
State.create(:id=>830, :name=>"Chimborazo",:country_id=>63)
State.create(:id=>831, :name=>"Carchi",:country_id=>63)
State.create(:id=>832, :name=>"Cañar",:country_id=>63)
State.create(:id=>833, :name=>"Bolívar",:country_id=>63)
State.create(:id=>834, :name=>"Azuay",:country_id=>63)
State.create(:id=>835, :name=>"Sucumbios",:country_id=>63)
State.create(:id=>836, :name=>"Orellana",:country_id=>63)
State.create(:id=>837, :name=>"Santo Domingo de los Tsáchilas",:country_id=>63)
State.create(:id=>838, :name=>"Santa Elena",:country_id=>63)
State.create(:id=>839, :name=>"Võrumaa",:country_id=>64)
State.create(:id=>840, :name=>"Viljandimaa",:country_id=>64)
State.create(:id=>841, :name=>"Valgamaa",:country_id=>64)
State.create(:id=>842, :name=>"Tartumaa",:country_id=>64)
State.create(:id=>843, :name=>"Saaremaa",:country_id=>64)
State.create(:id=>844, :name=>"Raplamaa",:country_id=>64)
State.create(:id=>845, :name=>"Põlvamaa",:country_id=>64)
State.create(:id=>846, :name=>"Pärnumaa",:country_id=>64)
State.create(:id=>847, :name=>"Lääne-Virumaa",:country_id=>64)
State.create(:id=>848, :name=>"Läänemaa",:country_id=>64)
State.create(:id=>849, :name=>"Jõgevamaa",:country_id=>64)
State.create(:id=>850, :name=>"Järvamaa",:country_id=>64)
State.create(:id=>851, :name=>"Ida-Virumaa",:country_id=>64)
State.create(:id=>852, :name=>"Hiiumaa",:country_id=>64)
State.create(:id=>853, :name=>"Harjumaa",:country_id=>64)
State.create(:id=>854, :name=>"Sūhāj",:country_id=>65)
State.create(:id=>855, :name=>"Shamāl Sīnāʼ",:country_id=>65)
State.create(:id=>856, :name=>"Qinā",:country_id=>65)
State.create(:id=>857, :name=>"Maţrūḩ",:country_id=>65)
State.create(:id=>858, :name=>"Kafr ash Shaykh",:country_id=>65)
State.create(:id=>859, :name=>"Janūb Sīnāʼ",:country_id=>65)
State.create(:id=>860, :name=>"Dumyāţ",:country_id=>65)
State.create(:id=>861, :name=>"Būr Sa‘īd",:country_id=>65)
State.create(:id=>862, :name=>"Banī Suwayf",:country_id=>65)
State.create(:id=>863, :name=>"Asyūţ",:country_id=>65)
State.create(:id=>864, :name=>"Aswān",:country_id=>65)
State.create(:id=>865, :name=>"As Suways",:country_id=>65)
State.create(:id=>866, :name=>"Eastern Province",:country_id=>65)
State.create(:id=>867, :name=>"Al Wādī al Jadīd",:country_id=>65)
State.create(:id=>868, :name=>"Al Qalyūbīyah",:country_id=>65)
State.create(:id=>869, :name=>"Al Qāhirah",:country_id=>65)
State.create(:id=>870, :name=>"Al Minyā",:country_id=>65)
State.create(:id=>871, :name=>"Al Minūfīyah",:country_id=>65)
State.create(:id=>872, :name=>"Al Jīzah",:country_id=>65)
State.create(:id=>873, :name=>"Al Ismā‘īlīyah",:country_id=>65)
State.create(:id=>874, :name=>"Alexandria",:country_id=>65)
State.create(:id=>875, :name=>"Al Gharbīyah",:country_id=>65)
State.create(:id=>876, :name=>"Al Fayyūm",:country_id=>65)
State.create(:id=>877, :name=>"Al Buḩayrah",:country_id=>65)
State.create(:id=>878, :name=>"Al Baḩr al Aḩmar",:country_id=>65)
State.create(:id=>879, :name=>"Ad Daqahlīyah",:country_id=>65)
State.create(:id=>880, :name=>"Muḩāfaz̧at al Uqşur",:country_id=>65)
State.create(:id=>881, :name=>"<NAME>",:country_id=>66)
State.create(:id=>882, :name=>"Ānseba",:country_id=>67)
State.create(:id=>883, :name=>"Debub",:country_id=>67)
State.create(:id=>884, :name=>"Debubawī Kʼeyih Bahrī",:country_id=>67)
State.create(:id=>885, :name=>"<NAME>",:country_id=>67)
State.create(:id=>886, :name=>"Maʼākel",:country_id=>67)
State.create(:id=>887, :name=>"<NAME>",:country_id=>67)
State.create(:id=>888, :name=>"Murcia",:country_id=>68)
State.create(:id=>889, :name=>"Ceuta",:country_id=>68)
State.create(:id=>890, :name=>"Balearic Islands",:country_id=>68)
State.create(:id=>891, :name=>"Andalusia",:country_id=>68)
State.create(:id=>892, :name=>"Canary Islands",:country_id=>68)
State.create(:id=>893, :name=>"Castille-La Mancha",:country_id=>68)
State.create(:id=>894, :name=>"Extremadura",:country_id=>68)
State.create(:id=>895, :name=>"Valencia",:country_id=>68)
State.create(:id=>896, :name=>"Asturias",:country_id=>68)
State.create(:id=>897, :name=>"Navarre",:country_id=>68)
State.create(:id=>898, :name=>"Madrid",:country_id=>68)
State.create(:id=>899, :name=>"La Rioja",:country_id=>68)
State.create(:id=>900, :name=>"Cantabria",:country_id=>68)
State.create(:id=>901, :name=>"Aragon",:country_id=>68)
State.create(:id=>902, :name=>"<NAME>",:country_id=>68)
State.create(:id=>903, :name=>"Catalonia",:country_id=>68)
State.create(:id=>904, :name=>"Galicia",:country_id=>68)
State.create(:id=>905, :name=>"Basque Country",:country_id=>68)
State.create(:id=>906, :name=>"Melilla",:country_id=>68)
State.create(:id=>907, :name=>"Ādīs Ābeba",:country_id=>69)
State.create(:id=>908, :name=>"Afar Regional State",:country_id=>69)
State.create(:id=>909, :name=>"The State of Amhara",:country_id=>69)
State.create(:id=>910, :name=>"Beneshangual Gumz Regional State",:country_id=>69)
State.create(:id=>911, :name=>"Dire Dawa Region",:country_id=>69)
State.create(:id=>912, :name=>"Gambela",:country_id=>69)
State.create(:id=>913, :name=>"The State of Harari People",:country_id=>69)
State.create(:id=>914, :name=>"Oromia Regional State",:country_id=>69)
State.create(:id=>915, :name=>"Somalia Regional State",:country_id=>69)
State.create(:id=>916, :name=>"Tigray Regional State",:country_id=>69)
State.create(:id=>917, :name=>"SSNPR",:country_id=>69)
State.create(:id=>918, :name=>"Ilubabor",:country_id=>69)
State.create(:id=>919, :name=>"Oulu",:country_id=>70)
State.create(:id=>920, :name=>"Lapponia",:country_id=>70)
State.create(:id=>921, :name=>"Southern Finland",:country_id=>70)
State.create(:id=>922, :name=>"Province of Eastern Finland",:country_id=>70)
State.create(:id=>923, :name=>"Western Finland",:country_id=>70)
State.create(:id=>924, :name=>"Western",:country_id=>71)
State.create(:id=>925, :name=>"Northern",:country_id=>71)
State.create(:id=>926, :name=>"Central",:country_id=>71)
State.create(:id=>927, :name=>"Eastern",:country_id=>71)
State.create(:id=>928, :name=>"Rotuma",:country_id=>71)
State.create(:id=>929, :name=>"Yap",:country_id=>73)
State.create(:id=>930, :name=>"Pohnpei",:country_id=>73)
State.create(:id=>931, :name=>"Kosrae",:country_id=>73)
State.create(:id=>932, :name=>"Chuuk",:country_id=>73)
State.create(:id=>933, :name=>"Vágar",:country_id=>74)
State.create(:id=>934, :name=>"Suðuroy",:country_id=>74)
State.create(:id=>935, :name=>"Streymoy",:country_id=>74)
State.create(:id=>936, :name=>"Sandoy",:country_id=>74)
State.create(:id=>937, :name=>"Norðoyar",:country_id=>74)
State.create(:id=>938, :name=>"Eysturoy",:country_id=>74)
State.create(:id=>939, :name=>"Rhône-Alpes",:country_id=>75)
State.create(:id=>940, :name=>"Provence-Alpes-Côte d'Azur",:country_id=>75)
State.create(:id=>941, :name=>"Poitou-Charentes",:country_id=>75)
State.create(:id=>942, :name=>"Picardie",:country_id=>75)
State.create(:id=>943, :name=>"Pays de la Loire",:country_id=>75)
State.create(:id=>944, :name=>"Nord-Pas-de-Calais",:country_id=>75)
State.create(:id=>945, :name=>"Midi-Pyrénées",:country_id=>75)
State.create(:id=>946, :name=>"Lorraine",:country_id=>75)
State.create(:id=>947, :name=>"Limousin",:country_id=>75)
State.create(:id=>948, :name=>"Languedoc-Roussillon",:country_id=>75)
State.create(:id=>949, :name=>"Île-de-France",:country_id=>75)
State.create(:id=>950, :name=>"Haute-Normandie",:country_id=>75)
State.create(:id=>951, :name=>"Franche-Comté",:country_id=>75)
State.create(:id=>952, :name=>"Corsica",:country_id=>75)
State.create(:id=>953, :name=>"Champagne-Ardenne",:country_id=>75)
State.create(:id=>954, :name=>"Centre",:country_id=>75)
State.create(:id=>955, :name=>"Brittany",:country_id=>75)
State.create(:id=>956, :name=>"Bourgogne",:country_id=>75)
State.create(:id=>957, :name=>"Basse-Normandie",:country_id=>75)
State.create(:id=>958, :name=>"Auvergne",:country_id=>75)
State.create(:id=>959, :name=>"Aquitaine",:country_id=>75)
State.create(:id=>960, :name=>"Alsace",:country_id=>75)
State.create(:id=>961, :name=>"Woleu-Ntem",:country_id=>76)
State.create(:id=>962, :name=>"Ogooué-Maritime",:country_id=>76)
State.create(:id=>963, :name=>"Ogooué-Lolo",:country_id=>76)
State.create(:id=>964, :name=>"Ogooué-Ivindo",:country_id=>76)
State.create(:id=>965, :name=>"Nyanga",:country_id=>76)
State.create(:id=>966, :name=>"Ngounié",:country_id=>76)
State.create(:id=>967, :name=>"Moyen-Ogooué",:country_id=>76)
State.create(:id=>968, :name=>"Haut-Ogooué",:country_id=>76)
State.create(:id=>969, :name=>"Estuaire",:country_id=>76)
# United Kingdom (country_id 77) constituent countries.
# Fix: each line contained a stray duplicate comma after the :id argument
# (":id=>970, ,:name=>..."), which is a Ruby syntax error and prevented
# the entire seed file from loading. Data values are unchanged.
State.create(:id=>970, :name=>"Wales",:country_id=>77)
State.create(:id=>971, :name=>"Scotland",:country_id=>77)
State.create(:id=>972, :name=>"N Ireland",:country_id=>77)
State.create(:id=>973, :name=>"England",:country_id=>77)
State.create(:id=>974, :name=>"<NAME>",:country_id=>78)
State.create(:id=>975, :name=>"<NAME>",:country_id=>78)
State.create(:id=>976, :name=>"<NAME>",:country_id=>78)
State.create(:id=>977, :name=>"<NAME>",:country_id=>78)
State.create(:id=>978, :name=>"<NAME>",:country_id=>78)
State.create(:id=>979, :name=>"<NAME>",:country_id=>78)
State.create(:id=>980, :name=>"Carriacou and <NAME>",:country_id=>78)
State.create(:id=>981, :name=>"T'bilisi",:country_id=>79)
State.create(:id=>982, :name=>"Ajaria",:country_id=>79)
State.create(:id=>983, :name=>"<NAME>",:country_id=>79)
State.create(:id=>984, :name=>"Kakheti",:country_id=>79)
State.create(:id=>985, :name=>"Guria",:country_id=>79)
State.create(:id=>986, :name=>"Imereti",:country_id=>79)
State.create(:id=>987, :name=>"<NAME>",:country_id=>79)
State.create(:id=>988, :name=>"Mtskheta-Mtianeti",:country_id=>79)
State.create(:id=>989, :name=>"Racha-Lechkhumi and <NAME>",:country_id=>79)
State.create(:id=>990, :name=>"Samegrelo and <NAME>",:country_id=>79)
State.create(:id=>991, :name=>"Samtskhe-Javakheti",:country_id=>79)
State.create(:id=>992, :name=>"Abkhazia",:country_id=>79)
State.create(:id=>993, :name=>"Guyane",:country_id=>80)
State.create(:id=>994, :name=>"Western",:country_id=>82)
State.create(:id=>995, :name=>"Volta",:country_id=>82)
State.create(:id=>996, :name=>"Upper West",:country_id=>82)
State.create(:id=>997, :name=>"Upper East",:country_id=>82)
State.create(:id=>998, :name=>"Northern",:country_id=>82)
State.create(:id=>999, :name=>"Greater Accra",:country_id=>82)
State.create(:id=>1000, :name=>"Eastern",:country_id=>82)
State.create(:id=>1001, :name=>"Central",:country_id=>82)
State.create(:id=>1002, :name=>"Brong-Ahafo",:country_id=>82)
State.create(:id=>1003, :name=>"Ashanti",:country_id=>82)
State.create(:id=>1004, :name=>"Qaasuitsup",:country_id=>84)
State.create(:id=>1005, :name=>"Kujalleq",:country_id=>84)
State.create(:id=>1006, :name=>"Qeqqata",:country_id=>84)
State.create(:id=>1007, :name=>"Sermersooq",:country_id=>84)
State.create(:id=>1008, :name=>"Western",:country_id=>85)
State.create(:id=>1009, :name=>"Upper River",:country_id=>85)
State.create(:id=>1010, :name=>"North Bank",:country_id=>85)
State.create(:id=>1011, :name=>"Central River",:country_id=>85)
State.create(:id=>1012, :name=>"Lower River",:country_id=>85)
State.create(:id=>1013, :name=>"Banjul",:country_id=>85)
State.create(:id=>1014, :name=>"Yomou",:country_id=>86)
State.create(:id=>1015, :name=>"Tougué",:country_id=>86)
State.create(:id=>1016, :name=>"Télimélé",:country_id=>86)
State.create(:id=>1017, :name=>"Siguiri",:country_id=>86)
State.create(:id=>1018, :name=>"Pita",:country_id=>86)
State.create(:id=>1019, :name=>"Nzérékoré",:country_id=>86)
State.create(:id=>1020, :name=>"Mamou",:country_id=>86)
State.create(:id=>1021, :name=>"Mali",:country_id=>86)
State.create(:id=>1022, :name=>"Macenta",:country_id=>86)
State.create(:id=>1023, :name=>"Labé",:country_id=>86)
State.create(:id=>1024, :name=>"Kouroussa",:country_id=>86)
State.create(:id=>1025, :name=>"Koundara",:country_id=>86)
State.create(:id=>1026, :name=>"Kissidougou",:country_id=>86)
State.create(:id=>1027, :name=>"Kindia",:country_id=>86)
State.create(:id=>1028, :name=>"Kérouané",:country_id=>86)
State.create(:id=>1029, :name=>"Kankan",:country_id=>86)
State.create(:id=>1030, :name=>"Guéckédou",:country_id=>86)
State.create(:id=>1031, :name=>"Gaoual",:country_id=>86)
State.create(:id=>1032, :name=>"Fria",:country_id=>86)
State.create(:id=>1033, :name=>"Forécariah",:country_id=>86)
State.create(:id=>1034, :name=>"Faranah",:country_id=>86)
State.create(:id=>1035, :name=>"Dubréka",:country_id=>86)
State.create(:id=>1036, :name=>"Dinguiraye",:country_id=>86)
State.create(:id=>1037, :name=>"Dalaba",:country_id=>86)
State.create(:id=>1038, :name=>"Dabola",:country_id=>86)
State.create(:id=>1039, :name=>"Conakry",:country_id=>86)
State.create(:id=>1040, :name=>"Boké",:country_id=>86)
State.create(:id=>1041, :name=>"Boffa",:country_id=>86)
State.create(:id=>1042, :name=>"Beyla",:country_id=>86)
State.create(:id=>1043, :name=>"Coyah",:country_id=>86)
State.create(:id=>1044, :name=>"Koubia",:country_id=>86)
State.create(:id=>1045, :name=>"Lélouma",:country_id=>86)
State.create(:id=>1046, :name=>"Lola",:country_id=>86)
State.create(:id=>1047, :name=>"Mandiana",:country_id=>86)
State.create(:id=>1048, :name=>"Guadeloupe",:country_id=>87)
State.create(:id=>1049, :name=>"Annobón",:country_id=>88)
State.create(:id=>1050, :name=>"<NAME>",:country_id=>88)
State.create(:id=>1051, :name=>"<NAME>",:country_id=>88)
State.create(:id=>1052, :name=>"<NAME>",:country_id=>88)
State.create(:id=>1053, :name=>"Kié-Ntem",:country_id=>88)
State.create(:id=>1054, :name=>"Litoral",:country_id=>88)
State.create(:id=>1055, :name=>"Wele-Nzas",:country_id=>88)
State.create(:id=>1056, :name=>"<NAME>",:country_id=>89)
State.create(:id=>1057, :name=>"Attiki",:country_id=>89)
State.create(:id=>1058, :name=>"Central Greece",:country_id=>89)
State.create(:id=>1059, :name=>"<NAME>onia",:country_id=>89)
State.create(:id=>1060, :name=>"Kriti",:country_id=>89)
State.create(:id=>1061, :name=>"<NAME>",:country_id=>89)
State.create(:id=>1062, :name=>"Ipeiros",:country_id=>89)
State.create(:id=>1063, :name=>"<NAME>",:country_id=>89)
State.create(:id=>1064, :name=>"<NAME>",:country_id=>89)
State.create(:id=>1065, :name=>"Pelopon", :country_id=>89)
State.create(:id=>1066, :name=>"<NAME>",:country_id=>89)
State.create(:id=>1067, :name=>"Thessalia",:country_id=>89)
State.create(:id=>1068, :name=>"<NAME>",:country_id=>89)
State.create(:id=>1069, :name=>"<NAME>",:country_id=>89)
State.create(:id=>1070, :name=>"Zacapa",:country_id=>91)
State.create(:id=>1071, :name=>"Totonicapán",:country_id=>91)
State.create(:id=>1072, :name=>"Suchitepéquez",:country_id=>91)
State.create(:id=>1073, :name=>"Sololá",:country_id=>91)
State.create(:id=>1074, :name=>"<NAME>",:country_id=>91)
State.create(:id=>1075, :name=>"<NAME>",:country_id=>91)
State.create(:id=>1076, :name=>"Sacatepéquez",:country_id=>91)
State.create(:id=>1077, :name=>"Retalhuleu",:country_id=>91)
State.create(:id=>1078, :name=>"Quiché",:country_id=>91)
State.create(:id=>1079, :name=>"Quetzaltenango",:country_id=>91)
State.create(:id=>1080, :name=>"Petén",:country_id=>91)
State.create(:id=>1081, :name=>"Jutiapa",:country_id=>91)
State.create(:id=>1082, :name=>"Jalapa",:country_id=>91)
State.create(:id=>1083, :name=>"Izabal",:country_id=>91)
State.create(:id=>1084, :name=>"Huehuetenango",:country_id=>91)
State.create(:id=>1085, :name=>"Guatemala",:country_id=>91)
State.create(:id=>1086, :name=>"Escuintla",:country_id=>91)
State.create(:id=>1087, :name=>"<NAME>",:country_id=>91)
State.create(:id=>1088, :name=>"Chiquimula",:country_id=>91)
State.create(:id=>1089, :name=>"Chimaltenango",:country_id=>91)
State.create(:id=>1090, :name=>"<NAME>",:country_id=>91)
State.create(:id=>1091, :name=>"<NAME>",:country_id=>91)
State.create(:id=>1092, :name=>"Piti Municipality",:country_id=>92)
State.create(:id=>1093, :name=>"Santa Rita Municipality",:country_id=>92)
State.create(:id=>1094, :name=>"Sinajana Municipality",:country_id=>92)
State.create(:id=>1095, :name=>"Talofofo Municipality",:country_id=>92)
State.create(:id=>1096, :name=>"Tamuning-Tumon-Harmon Municipality",:country_id=>92)
State.create(:id=>1097, :name=>"Umatac Municipality",:country_id=>92)
State.create(:id=>1098, :name=>"Yigo Municipality",:country_id=>92)
State.create(:id=>1099, :name=>"Yona Municipality",:country_id=>92)
State.create(:id=>1100, :name=>"Merizo Municipality",:country_id=>92)
State.create(:id=>1101, :name=>"Mangilao Municipality",:country_id=>92)
State.create(:id=>1102, :name=>"Agana Heights Municipality",:country_id=>92)
State.create(:id=>1103, :name=>"Chalan Pago-Ordot Municipality",:country_id=>92)
State.create(:id=>1104, :name=>"Asan-Maina Municipality",:country_id=>92)
State.create(:id=>1105, :name=>"Agat Municipality",:country_id=>92)
State.create(:id=>1106, :name=>"Dededo Municipality",:country_id=>92)
State.create(:id=>1107, :name=>"Barrigada Municipality",:country_id=>92)
State.create(:id=>1108, :name=>"Hagatna Municipality",:country_id=>92)
State.create(:id=>1109, :name=>"Inarajan Municipality",:country_id=>92)
State.create(:id=>1110, :name=>"Mongmong-Toto-Maite Municipality",:country_id=>92)
State.create(:id=>1111, :name=>"Tombali",:country_id=>93)
State.create(:id=>1112, :name=>"Quinara",:country_id=>93)
State.create(:id=>1113, :name=>"Oio",:country_id=>93)
State.create(:id=>1114, :name=>"Gabú",:country_id=>93)
State.create(:id=>1115, :name=>"Cacheu",:country_id=>93)
State.create(:id=>1116, :name=>"Bolama and Bijagos",:country_id=>93)
State.create(:id=>1117, :name=>"Bissau Autonomous Region",:country_id=>93)
State.create(:id=>1118, :name=>"Biombo",:country_id=>93)
State.create(:id=>1119, :name=>"Bafatá",:country_id=>93)
State.create(:id=>1120, :name=>"Upper Takutu-Upper Essequibo",:country_id=>94)
State.create(:id=>1121, :name=>"Upper Demerara-Berbice",:country_id=>94)
State.create(:id=>1122, :name=>"Potaro-Siparuni",:country_id=>94)
State.create(:id=>1123, :name=>"Pomeroon-Supenaam",:country_id=>94)
State.create(:id=>1124, :name=>"Mahaica-Berbice",:country_id=>94)
State.create(:id=>1125, :name=>"Essequibo Islands-West Demerara",:country_id=>94)
State.create(:id=>1126, :name=>"East Berbice-Corentyne",:country_id=>94)
State.create(:id=>1127, :name=>"Demerara-Mahaica",:country_id=>94)
State.create(:id=>1128, :name=>"Cuyuni-Mazaruni",:country_id=>94)
State.create(:id=>1129, :name=>"Barima-Waini",:country_id=>94)
# Hong Kong (country_id 95) districts.
# Fix: each line contained a stray duplicate comma after the :id argument
# (":id=>1130, ,:name=>..."), which is a Ruby syntax error and prevented
# the entire seed file from loading. Data values (including the <NAME>
# anonymization placeholders present in the source) are unchanged.
State.create(:id=>1130, :name=>"<NAME>",:country_id=>95)
State.create(:id=>1131, :name=>"<NAME>",:country_id=>95)
State.create(:id=>1132, :name=>"<NAME>",:country_id=>95)
State.create(:id=>1133, :name=>"<NAME>",:country_id=>95)
State.create(:id=>1134, :name=>"Islands",:country_id=>95)
State.create(:id=>1135, :name=>"Central and Western",:country_id=>95)
State.create(:id=>1136, :name=>"Wan Chai",:country_id=>95)
State.create(:id=>1137, :name=>"Eastern",:country_id=>95)
State.create(:id=>1138, :name=>"Southern",:country_id=>95)
State.create(:id=>1139, :name=>"<NAME>",:country_id=>95)
State.create(:id=>1140, :name=>"<NAME>",:country_id=>95)
State.create(:id=>1141, :name=>"<NAME>",:country_id=>95)
State.create(:id=>1142, :name=>"<NAME>",:country_id=>95)
State.create(:id=>1143, :name=>"<NAME>",:country_id=>95)
State.create(:id=>1144, :name=>"<NAME>",:country_id=>95)
State.create(:id=>1145, :name=>"<NAME>",:country_id=>95)
State.create(:id=>1146, :name=>"North",:country_id=>95)
State.create(:id=>1147, :name=>"<NAME>",:country_id=>95)
State.create(:id=>1148, :name=>"Yoro",:country_id=>97)
State.create(:id=>1149, :name=>"Valle",:country_id=>97)
State.create(:id=>1150, :name=>"<NAME>",:country_id=>97)
State.create(:id=>1151, :name=>"Olancho",:country_id=>97)
State.create(:id=>1152, :name=>"Ocotepeque",:country_id=>97)
State.create(:id=>1153, :name=>"Lempira",:country_id=>97)
State.create(:id=>1154, :name=>"<NAME>",:country_id=>97)
State.create(:id=>1155, :name=>"<NAME>",:country_id=>97)
State.create(:id=>1156, :name=>"Intibucá",:country_id=>97)
State.create(:id=>1157, :name=>"<NAME>",:country_id=>97)
State.create(:id=>1158, :name=>"<NAME>",:country_id=>97)
State.create(:id=>1159, :name=>"El Paraíso",:country_id=>97)
State.create(:id=>1160, :name=>"Cortés",:country_id=>97)
State.create(:id=>1161, :name=>"Copán",:country_id=>97)
State.create(:id=>1162, :name=>"Comayagua",:country_id=>97)
State.create(:id=>1163, :name=>"Colón",:country_id=>97)
State.create(:id=>1164, :name=>"Choluteca",:country_id=>97)
State.create(:id=>1165, :name=>"Atlántida",:country_id=>97)
State.create(:id=>1166, :name=>"Bjelovarsko-Bilogorska",:country_id=>98)
State.create(:id=>1167, :name=>"Brodsko-Posavska",:country_id=>98)
State.create(:id=>1168, :name=>"Dubrovačko-Neretvanska",:country_id=>98)
State.create(:id=>1169, :name=>"Istarska",:country_id=>98)
State.create(:id=>1170, :name=>"Karlovačka",:country_id=>98)
State.create(:id=>1171, :name=>"Koprivničko-Križevačka",:country_id=>98)
State.create(:id=>1172, :name=>"Krapinsko-Zagorska",:country_id=>98)
State.create(:id=>1173, :name=>"Ličko-Senjska",:country_id=>98)
State.create(:id=>1174, :name=>"Međimurska",:country_id=>98)
State.create(:id=>1175, :name=>"Osječko-Baranjska",:country_id=>98)
State.create(:id=>1176, :name=>"Požeško-Slavonska",:country_id=>98)
State.create(:id=>1177, :name=>"Primorsko-Goranska",:country_id=>98)
State.create(:id=>1178, :name=>"Šibensko-Kniniska",:country_id=>98)
State.create(:id=>1179, :name=>"Sisačko-Moslavačka",:country_id=>98)
State.create(:id=>1180, :name=>"Splitsko-Dalmatinska",:country_id=>98)
State.create(:id=>1181, :name=>"Varaždinska",:country_id=>98)
State.create(:id=>1182, :name=>"Vukovarsko-Srijemska",:country_id=>98)
State.create(:id=>1183, :name=>"Zadarska",:country_id=>98)
State.create(:id=>1184, :name=>"Zagrebačka",:country_id=>98)
State.create(:id=>1185, :name=>"<NAME>",:country_id=>98)
State.create(:id=>1186, :name=>"Virovitičk-Podravska",:country_id=>98)
State.create(:id=>1187, :name=>"Sud-Est",:country_id=>99)
State.create(:id=>1188, :name=>"Sud",:country_id=>99)
State.create(:id=>1189, :name=>"Ouest",:country_id=>99)
State.create(:id=>1190, :name=>"Nord-Ouest",:country_id=>99)
State.create(:id=>1191, :name=>"Nord-Est",:country_id=>99)
State.create(:id=>1192, :name=>"Nord",:country_id=>99)
State.create(:id=>1193, :name=>"GrandʼAnse",:country_id=>99)
State.create(:id=>1194, :name=>"Centre",:country_id=>99)
State.create(:id=>1195, :name=>"Artibonite",:country_id=>99)
State.create(:id=>1196, :name=>"Nippes",:country_id=>99)
State.create(:id=>1197, :name=>"Szabolcs-Szatmár-Bereg",:country_id=>100)
State.create(:id=>1198, :name=>"Jász-Nagykun-Szolnok",:country_id=>100)
State.create(:id=>1199, :name=>"Heves",:country_id=>100)
State.create(:id=>1200, :name=>"Hajdú-Bihar",:country_id=>100)
State.create(:id=>1201, :name=>"Csongrád",:country_id=>100)
State.create(:id=>1202, :name=>"Borsod-Abaúj-Zemplén",:country_id=>100)
State.create(:id=>1203, :name=>"Békés",:country_id=>100)
State.create(:id=>1204, :name=>"Zala",:country_id=>100)
State.create(:id=>1205, :name=>"Veszprém",:country_id=>100)
State.create(:id=>1206, :name=>"Vas",:country_id=>100)
State.create(:id=>1207, :name=>"Tolna",:country_id=>100)
State.create(:id=>1208, :name=>"Somogy",:country_id=>100)
State.create(:id=>1209, :name=>"Pest",:country_id=>100)
State.create(:id=>1210, :name=>"Nógrád",:country_id=>100)
State.create(:id=>1211, :name=>"Komárom-Esztergom",:country_id=>100)
State.create(:id=>1212, :name=>"Győr-Moson-Sopron",:country_id=>100)
State.create(:id=>1213, :name=>"Fejér",:country_id=>100)
State.create(:id=>1214, :name=>"Budapest",:country_id=>100)
State.create(:id=>1215, :name=>"Baranya",:country_id=>100)
State.create(:id=>1216, :name=>"Bács-Kiskun",:country_id=>100)
State.create(:id=>1217, :name=>"North Sumatra",:country_id=>101)
State.create(:id=>1218, :name=>"Aceh",:country_id=>101)
State.create(:id=>1219, :name=>"<NAME>",:country_id=>101)
State.create(:id=>1220, :name=>"South Sumatra",:country_id=>101)
State.create(:id=>1221, :name=>"West Sumatra",:country_id=>101)
State.create(:id=>1222, :name=>"North Sulawesi",:country_id=>101)
State.create(:id=>1223, :name=>"Sulawesi Tenggara",:country_id=>101)
State.create(:id=>1224, :name=>"Central Sulawesi",:country_id=>101)
State.create(:id=>1225, :name=>"South Sulawesi",:country_id=>101)
State.create(:id=>1226, :name=>"Riau",:country_id=>101)
State.create(:id=>1227, :name=>"East Nusa Tenggara",:country_id=>101)
State.create(:id=>1228, :name=>"Nusa Tenggara Barat",:country_id=>101)
State.create(:id=>1229, :name=>"Maluku",:country_id=>101)
State.create(:id=>1230, :name=>"Lampung",:country_id=>101)
State.create(:id=>1231, :name=>"East Kalimantan",:country_id=>101)
State.create(:id=>1232, :name=>"Kalimantan Tengah",:country_id=>101)
State.create(:id=>1233, :name=>"South Kalimantan",:country_id=>101)
State.create(:id=>1234, :name=>"West Kalimantan",:country_id=>101)
State.create(:id=>1235, :name=>"East Java",:country_id=>101)
State.create(:id=>1236, :name=>"Central Java",:country_id=>101)
State.create(:id=>1237, :name=>"West Java",:country_id=>101)
State.create(:id=>1238, :name=>"Jambi",:country_id=>101)
State.create(:id=>1239, :name=>"Jakarta Raya",:country_id=>101)
State.create(:id=>1240, :name=>"Papua",:country_id=>101)
State.create(:id=>1241, :name=>"Bengkulu",:country_id=>101)
State.create(:id=>1242, :name=>"Bali",:country_id=>101)
State.create(:id=>1243, :name=>"Banten",:country_id=>101)
State.create(:id=>1244, :name=>"Gorontalo",:country_id=>101)
State.create(:id=>1245, :name=>"Bangka-Belitung",:country_id=>101)
State.create(:id=>1246, :name=>"<NAME>",:country_id=>101)
State.create(:id=>1247, :name=>"<NAME>",:country_id=>101)
State.create(:id=>1248, :name=>"<NAME>",:country_id=>101)
State.create(:id=>1249, :name=>"Riau Islands",:country_id=>101)
State.create(:id=>1250, :name=>"Connaught",:country_id=>102)
State.create(:id=>1251, :name=>"Leinster",:country_id=>102)
State.create(:id=>1252, :name=>"Munster",:country_id=>102)
State.create(:id=>1253, :name=>"Ulster",:country_id=>102)
State.create(:id=>1254, :name=>"Jerusalem District",:country_id=>103)
State.create(:id=>1255, :name=>"<NAME>",:country_id=>103)
State.create(:id=>1256, :name=>"Haifa",:country_id=>103)
State.create(:id=>1257, :name=>"Northern District",:country_id=>103)
State.create(:id=>1258, :name=>"Central District",:country_id=>103)
State.create(:id=>1259, :name=>"Southern District",:country_id=>103)
State.create(:id=>1260, :name=>"Bengal",:country_id=>105)
State.create(:id=>1261, :name=>"Uttar Pradesh",:country_id=>105)
State.create(:id=>1262, :name=>"Tripura",:country_id=>105)
State.create(:id=>1263, :name=>"Tamil Nādu",:country_id=>105)
State.create(:id=>1264, :name=>"Sikkim",:country_id=>105)
State.create(:id=>1265, :name=>"Rajasthan",:country_id=>105)
State.create(:id=>1266, :name=>"Punjab",:country_id=>105)
State.create(:id=>1267, :name=>"Pondicherry",:country_id=>105)
State.create(:id=>1268, :name=>"Orissa",:country_id=>105)
State.create(:id=>1269, :name=>"Nāgāland",:country_id=>105)
State.create(:id=>1270, :name=>"Mizoram",:country_id=>105)
State.create(:id=>1271, :name=>"Meghālaya",:country_id=>105)
State.create(:id=>1272, :name=>"Manipur",:country_id=>105)
State.create(:id=>1273, :name=>"Mahārāshtra",:country_id=>105)
State.create(:id=>1274, :name=>"Madhya Pradesh",:country_id=>105)
State.create(:id=>1275, :name=>"Laccadives",:country_id=>105)
State.create(:id=>1276, :name=>"Kerala",:country_id=>105)
State.create(:id=>1277, :name=>"Karnātaka",:country_id=>105)
State.create(:id=>1278, :name=>"Jammu and Kashmir",:country_id=>105)
State.create(:id=>1279, :name=>"Himachal Pradesh",:country_id=>105)
State.create(:id=>1280, :name=>"Haryana",:country_id=>105)
State.create(:id=>1281, :name=>"Gujarāt",:country_id=>105)
State.create(:id=>1282, :name=>"Daman and Diu",:country_id=>105)
State.create(:id=>1283, :name=>"Goa",:country_id=>105)
State.create(:id=>1284, :name=>"NCT",:country_id=>105)
State.create(:id=>1285, :name=>"Dādra and Nagar Haveli",:country_id=>105)
State.create(:id=>1286, :name=>"Chandīgarh",:country_id=>105)
State.create(:id=>1287, :name=>"Bihār",:country_id=>105)
State.create(:id=>1288, :name=>"Assam",:country_id=>105)
State.create(:id=>1289, :name=>"Arunāchal Pradesh",:country_id=>105)
State.create(:id=>1290, :name=>"Andhra Pradesh",:country_id=>105)
State.create(:id=>1291, :name=>"Andaman and Nicobar Islands",:country_id=>105)
State.create(:id=>1292, :name=>"Chhattisgarh",:country_id=>105)
State.create(:id=>1293, :name=>"Jharkhand",:country_id=>105)
State.create(:id=>1294, :name=>"Uttarakhand",:country_id=>105)
State.create(:id=>1295, :name=>"Al Başrah",:country_id=>107)
State.create(:id=>1296, :name=>"Wāsiţ",:country_id=>107)
State.create(:id=>1297, :name=>"Şalāḩ ad Dīn",:country_id=>107)
State.create(:id=>1298, :name=>"Nīnawá",:country_id=>107)
State.create(:id=>1299, :name=>"Maysan",:country_id=>107)
State.create(:id=>1300, :name=>"Karbalāʼ",:country_id=>107)
State.create(:id=>1301, :name=>"Arbīl",:country_id=>107)
State.create(:id=>1302, :name=>"Diyala",:country_id=>107)
State.create(:id=>1303, :name=>"Dhi Qar",:country_id=>107)
State.create(:id=>1304, :name=>"Dahūk",:country_id=>107)
# Seed data: one State row per line, keyed to a parent Country via :country_id.
# This is a chunk of a larger generated seed script — ids and country groups
# continue before and after this section (note the gaps: country_ids 111, 123,
# 125, 139, 142 are defined elsewhere or absent).
#
# NOTE(review): many :name values are the literal placeholder "<NAME>" —
# presumably an upstream redaction/anonymization pass clobbered them. These
# rows will seed with that literal string; regenerate from the original
# dataset rather than hand-editing here — TODO confirm.
# NOTE(review): State.create (without bang) silently ignores validation
# failures, and re-running the script with fixed :id values will raise on
# duplicate primary keys in most setups — consider create! or
# find_or_create_by for idempotent seeding; verify project convention.
# Country identifications below are inferred from the state names — confirm
# against the Country seed section.

# country_id 107 — Iraq (inferred)
State.create(:id=>1305, :name=>"Baghdād",:country_id=>107)
State.create(:id=>1306, :name=>"Bābil",:country_id=>107)
State.create(:id=>1307, :name=>"At Taʼmīm",:country_id=>107)
State.create(:id=>1308, :name=>"As Sulaymānīyah",:country_id=>107)
State.create(:id=>1309, :name=>"An Najaf",:country_id=>107)
State.create(:id=>1310, :name=>"Al Qādisīyah",:country_id=>107)
State.create(:id=>1311, :name=>"Al Muthanná",:country_id=>107)
State.create(:id=>1312, :name=>"Anbar",:country_id=>107)
# country_id 108 — Iran (inferred)
State.create(:id=>1313, :name=>"Tehrān",:country_id=>108)
State.create(:id=>1314, :name=>"Zanjan",:country_id=>108)
State.create(:id=>1315, :name=>"Yazd",:country_id=>108)
State.create(:id=>1316, :name=>"Semnān",:country_id=>108)
State.create(:id=>1317, :name=>"Māzandarān",:country_id=>108)
State.create(:id=>1318, :name=>"Markazi",:country_id=>108)
State.create(:id=>1319, :name=>"Lorestān",:country_id=>108)
State.create(:id=>1320, :name=>"Kordestān",:country_id=>108)
State.create(:id=>1321, :name=>"Kohgīlūyeh va <NAME>mad",:country_id=>108)
State.create(:id=>1322, :name=>"Khūzestān",:country_id=>108)
State.create(:id=>1323, :name=>"Kermānshāh",:country_id=>108)
State.create(:id=>1324, :name=>"Kermān",:country_id=>108)
State.create(:id=>1325, :name=>"Īlām",:country_id=>108)
State.create(:id=>1326, :name=>"Hormozgān",:country_id=>108)
State.create(:id=>1327, :name=>"Hamadān",:country_id=>108)
State.create(:id=>1328, :name=>"Gīlān",:country_id=>108)
State.create(:id=>1329, :name=>"Fārs",:country_id=>108)
State.create(:id=>1330, :name=>"<NAME>ḩāll va Bakhtīārī",:country_id=>108)
State.create(:id=>1331, :name=>"Bushehr",:country_id=>108)
State.create(:id=>1332, :name=>"East Azarbaijan",:country_id=>108)
State.create(:id=>1333, :name=>"Āz̄ārbāyjān-e Gharbī",:country_id=>108)
State.create(:id=>1334, :name=>"Ardabīl",:country_id=>108)
State.create(:id=>1335, :name=>"Eşfahān",:country_id=>108)
State.create(:id=>1336, :name=>"Golestān",:country_id=>108)
State.create(:id=>1337, :name=>"Qazvīn",:country_id=>108)
State.create(:id=>1338, :name=>"Qom",:country_id=>108)
State.create(:id=>1339, :name=>"Sīstān va Balūchestān",:country_id=>108)
State.create(:id=>1340, :name=>"Khorāsān-<NAME>",:country_id=>108)
State.create(:id=>1341, :name=>"<NAME>",:country_id=>108)
State.create(:id=>1342, :name=>"Khorāsān-e Shomālī",:country_id=>108)
State.create(:id=>1343, :name=>"Ostān-e Alborz",:country_id=>108)
# country_id 109 — Iceland (inferred)
State.create(:id=>1344, :name=>"Northwest",:country_id=>109)
State.create(:id=>1345, :name=>"Northeast",:country_id=>109)
State.create(:id=>1346, :name=>"East",:country_id=>109)
State.create(:id=>1347, :name=>"South",:country_id=>109)
State.create(:id=>1348, :name=>"Capital Region",:country_id=>109)
State.create(:id=>1349, :name=>"Southern Peninsula",:country_id=>109)
State.create(:id=>1350, :name=>"West",:country_id=>109)
State.create(:id=>1351, :name=>"Westfjords",:country_id=>109)
# country_id 110 — Italy (inferred)
State.create(:id=>1352, :name=>"Sicily",:country_id=>110)
State.create(:id=>1353, :name=>"Sardinia",:country_id=>110)
State.create(:id=>1354, :name=>"Calabria",:country_id=>110)
State.create(:id=>1355, :name=>"Veneto",:country_id=>110)
State.create(:id=>1356, :name=>"<NAME>",:country_id=>110)
State.create(:id=>1357, :name=>"Umbria",:country_id=>110)
State.create(:id=>1358, :name=>"<NAME>",:country_id=>110)
State.create(:id=>1359, :name=>"Tuscany",:country_id=>110)
State.create(:id=>1360, :name=>"Apulia",:country_id=>110)
State.create(:id=>1361, :name=>"Piedmont",:country_id=>110)
State.create(:id=>1362, :name=>"Molise",:country_id=>110)
State.create(:id=>1363, :name=>"The Marches",:country_id=>110)
State.create(:id=>1364, :name=>"Lombardy",:country_id=>110)
State.create(:id=>1365, :name=>"Liguria",:country_id=>110)
State.create(:id=>1366, :name=>"Latium",:country_id=>110)
State.create(:id=>1367, :name=>"<NAME>",:country_id=>110)
State.create(:id=>1368, :name=>"Emilia-Romagna",:country_id=>110)
State.create(:id=>1369, :name=>"Campania",:country_id=>110)
State.create(:id=>1370, :name=>"Basilicate",:country_id=>110)
State.create(:id=>1371, :name=>"Abruzzo",:country_id=>110)
# country_id 112 — Jamaica (inferred); 111 not present in this chunk
State.create(:id=>1372, :name=>"Westmoreland",:country_id=>112)
State.create(:id=>1373, :name=>"Trelawny",:country_id=>112)
State.create(:id=>1374, :name=>"<NAME>",:country_id=>112)
State.create(:id=>1375, :name=>"<NAME>",:country_id=>112)
State.create(:id=>1376, :name=>"<NAME>",:country_id=>112)
State.create(:id=>1377, :name=>"<NAME>",:country_id=>112)
State.create(:id=>1378, :name=>"<NAME>",:country_id=>112)
State.create(:id=>1379, :name=>"<NAME>",:country_id=>112)
State.create(:id=>1380, :name=>"<NAME>",:country_id=>112)
State.create(:id=>1381, :name=>"Portland",:country_id=>112)
State.create(:id=>1382, :name=>"Manchester",:country_id=>112)
State.create(:id=>1383, :name=>"Kingston",:country_id=>112)
State.create(:id=>1384, :name=>"<NAME>",:country_id=>112)
State.create(:id=>1385, :name=>"Clarendon",:country_id=>112)
# country_id 113 — Jordan (inferred)
State.create(:id=>1386, :name=>"Ma’an",:country_id=>113)
State.create(:id=>1387, :name=>"Irbid",:country_id=>113)
State.create(:id=>1388, :name=>"Zarqa",:country_id=>113)
State.create(:id=>1389, :name=>"Tafielah",:country_id=>113)
State.create(:id=>1390, :name=>"Amman",:country_id=>113)
State.create(:id=>1391, :name=>"Mafraq",:country_id=>113)
State.create(:id=>1392, :name=>"Karak",:country_id=>113)
State.create(:id=>1393, :name=>"Balqa",:country_id=>113)
State.create(:id=>1394, :name=>"Ajlun",:country_id=>113)
State.create(:id=>1395, :name=>"Jerash",:country_id=>113)
State.create(:id=>1396, :name=>"Aqaba",:country_id=>113)
State.create(:id=>1397, :name=>"Madaba",:country_id=>113)
# country_id 114 — Japan (inferred)
State.create(:id=>1398, :name=>"Yamanashi",:country_id=>114)
State.create(:id=>1399, :name=>"Yamaguchi",:country_id=>114)
State.create(:id=>1400, :name=>"Wakayama",:country_id=>114)
State.create(:id=>1401, :name=>"Toyama",:country_id=>114)
State.create(:id=>1402, :name=>"Tottori",:country_id=>114)
State.create(:id=>1403, :name=>"Tōkyō",:country_id=>114)
State.create(:id=>1404, :name=>"Tokushima",:country_id=>114)
State.create(:id=>1405, :name=>"Tochigi",:country_id=>114)
State.create(:id=>1406, :name=>"Shizuoka",:country_id=>114)
State.create(:id=>1407, :name=>"Shimane",:country_id=>114)
State.create(:id=>1408, :name=>"Shiga",:country_id=>114)
State.create(:id=>1409, :name=>"Saitama",:country_id=>114)
State.create(:id=>1410, :name=>"Saga",:country_id=>114)
State.create(:id=>1411, :name=>"Ōsaka",:country_id=>114)
State.create(:id=>1412, :name=>"Okinawa",:country_id=>114)
State.create(:id=>1413, :name=>"Okayama",:country_id=>114)
State.create(:id=>1414, :name=>"Ōita",:country_id=>114)
State.create(:id=>1415, :name=>"Niigata",:country_id=>114)
State.create(:id=>1416, :name=>"Nara",:country_id=>114)
State.create(:id=>1417, :name=>"Nagasaki",:country_id=>114)
State.create(:id=>1418, :name=>"Nagano",:country_id=>114)
State.create(:id=>1419, :name=>"Miyazaki",:country_id=>114)
State.create(:id=>1420, :name=>"Mie",:country_id=>114)
State.create(:id=>1421, :name=>"Kyōto",:country_id=>114)
State.create(:id=>1422, :name=>"Kumamoto",:country_id=>114)
State.create(:id=>1423, :name=>"Kōchi",:country_id=>114)
State.create(:id=>1424, :name=>"Kanagawa",:country_id=>114)
State.create(:id=>1425, :name=>"Kagoshima",:country_id=>114)
State.create(:id=>1426, :name=>"Kagawa",:country_id=>114)
State.create(:id=>1427, :name=>"Ishikawa",:country_id=>114)
State.create(:id=>1428, :name=>"Hyōgo",:country_id=>114)
State.create(:id=>1429, :name=>"Hiroshima",:country_id=>114)
State.create(:id=>1430, :name=>"Gunma",:country_id=>114)
State.create(:id=>1431, :name=>"Gifu",:country_id=>114)
State.create(:id=>1432, :name=>"Fukuoka",:country_id=>114)
State.create(:id=>1433, :name=>"Fukui",:country_id=>114)
State.create(:id=>1434, :name=>"Ehime",:country_id=>114)
State.create(:id=>1435, :name=>"Aichi",:country_id=>114)
State.create(:id=>1436, :name=>"Yamagata",:country_id=>114)
State.create(:id=>1437, :name=>"Miyagi",:country_id=>114)
State.create(:id=>1438, :name=>"Iwate",:country_id=>114)
State.create(:id=>1439, :name=>"Ibaraki",:country_id=>114)
State.create(:id=>1440, :name=>"Fukushima",:country_id=>114)
State.create(:id=>1441, :name=>"Chiba",:country_id=>114)
State.create(:id=>1442, :name=>"Akita",:country_id=>114)
State.create(:id=>1443, :name=>"Hokkaidō",:country_id=>114)
State.create(:id=>1444, :name=>"Aomori",:country_id=>114)
# country_id 115 — Kenya (inferred)
State.create(:id=>1445, :name=>"Nyanza",:country_id=>115)
State.create(:id=>1446, :name=>"Nairobi Area",:country_id=>115)
State.create(:id=>1447, :name=>"Coast",:country_id=>115)
State.create(:id=>1448, :name=>"Eastern",:country_id=>115)
State.create(:id=>1449, :name=>"Central",:country_id=>115)
State.create(:id=>1450, :name=>"Western",:country_id=>115)
State.create(:id=>1451, :name=>"Rift Valley",:country_id=>115)
State.create(:id=>1452, :name=>"North-Eastern",:country_id=>115)
# country_id 116 — Kyrgyzstan (inferred)
State.create(:id=>1453, :name=>"Osh",:country_id=>116)
State.create(:id=>1454, :name=>"Batken",:country_id=>116)
State.create(:id=>1455, :name=>"Talas",:country_id=>116)
State.create(:id=>1456, :name=>"Naryn",:country_id=>116)
State.create(:id=>1457, :name=>"Ysyk-Köl",:country_id=>116)
State.create(:id=>1458, :name=>"Bishkek",:country_id=>116)
State.create(:id=>1459, :name=>"Jalal-Abad",:country_id=>116)
State.create(:id=>1460, :name=>"Chüy",:country_id=>116)
# country_id 117 — Cambodia (inferred)
State.create(:id=>1461, :name=>"Poŭthĭsăt",:country_id=>117)
State.create(:id=>1462, :name=>"<NAME>",:country_id=>117)
State.create(:id=>1463, :name=>"Takêv",:country_id=>117)
State.create(:id=>1464, :name=>"<NAME>",:country_id=>117)
State.create(:id=>1465, :name=>"Stœ̆<NAME>",:country_id=>117)
State.create(:id=>1466, :name=>"<NAME>",:country_id=>117)
State.create(:id=>1467, :name=>"<NAME>",:country_id=>117)
State.create(:id=>1468, :name=>"<NAME>",:country_id=>117)
State.create(:id=>1469, :name=>"<NAME>",:country_id=>117)
State.create(:id=>1470, :name=>"<NAME>héar",:country_id=>117)
State.create(:id=>1471, :name=>"<NAME>",:country_id=>117)
State.create(:id=>1472, :name=>"<NAME>",:country_id=>117)
State.create(:id=>1473, :name=>"<NAME>",:country_id=>117)
State.create(:id=>1474, :name=>"Krâchéh",:country_id=>117)
State.create(:id=>1475, :name=>"<NAME>",:country_id=>117)
State.create(:id=>1476, :name=>"<NAME>",:country_id=>117)
State.create(:id=>1477, :name=>"Kândal",:country_id=>117)
State.create(:id=>1478, :name=>"<NAME>",:country_id=>117)
State.create(:id=>1479, :name=>"<NAME>",:country_id=>117)
State.create(:id=>1480, :name=>"<NAME>",:country_id=>117)
State.create(:id=>1481, :name=>"<NAME>",:country_id=>117)
State.create(:id=>1482, :name=>"<NAME>",:country_id=>117)
State.create(:id=>1483, :name=>"<NAME>",:country_id=>117)
State.create(:id=>1484, :name=>"<NAME>",:country_id=>117)
State.create(:id=>1485, :name=>"Battambang",:country_id=>117)
# country_id 118 — Kiribati (inferred)
State.create(:id=>1486, :name=>"<NAME>",:country_id=>118)
State.create(:id=>1487, :name=>"Line Islands",:country_id=>118)
State.create(:id=>1488, :name=>"Phoenix Islands",:country_id=>118)
# country_id 119 — Comoros (inferred)
State.create(:id=>1489, :name=>"Mohéli",:country_id=>119)
State.create(:id=>1490, :name=>"<NAME>",:country_id=>119)
State.create(:id=>1491, :name=>"Anjouan",:country_id=>119)
# country_id 120 — Saint Kitts and Nevis (inferred)
State.create(:id=>1492, :name=>"Trinity Palmetto Point",:country_id=>120)
State.create(:id=>1493, :name=>"Saint Thomas Middle Island",:country_id=>120)
State.create(:id=>1494, :name=>"Saint <NAME>",:country_id=>120)
State.create(:id=>1495, :name=>"Saint <NAME>",:country_id=>120)
State.create(:id=>1496, :name=>"Saint <NAME>",:country_id=>120)
State.create(:id=>1497, :name=>"Saint <NAME>",:country_id=>120)
State.create(:id=>1498, :name=>"Saint <NAME>",:country_id=>120)
State.create(:id=>1499, :name=>"Saint <NAME>",:country_id=>120)
State.create(:id=>1500, :name=>"Saint <NAME>",:country_id=>120)
State.create(:id=>1501, :name=>"Saint <NAME>",:country_id=>120)
State.create(:id=>1502, :name=>"Saint <NAME>",:country_id=>120)
State.create(:id=>1503, :name=>"Saint <NAME>",:country_id=>120)
State.create(:id=>1504, :name=>"Saint Anne Sandy Point",:country_id=>120)
State.create(:id=>1505, :name=>"Christ Church Nichola Town",:country_id=>120)
# country_id 121 — North Korea (inferred)
State.create(:id=>1506, :name=>"P'yŏngyang-si",:country_id=>121)
State.create(:id=>1507, :name=>"P'yŏngan-namdo",:country_id=>121)
State.create(:id=>1508, :name=>"P'yŏngan-bukto",:country_id=>121)
State.create(:id=>1509, :name=>"Gangwon",:country_id=>121)
State.create(:id=>1510, :name=>"Hwanghae-namdo",:country_id=>121)
State.create(:id=>1511, :name=>"Hwanghae-bukto",:country_id=>121)
State.create(:id=>1512, :name=>"Hamgyŏng-namdo",:country_id=>121)
State.create(:id=>1513, :name=>"Yanggang-do",:country_id=>121)
State.create(:id=>1514, :name=>"Hamgyŏng-bukto",:country_id=>121)
State.create(:id=>1515, :name=>"Chagang-do",:country_id=>121)
State.create(:id=>1516, :name=>"Najin Sŏnbong-si",:country_id=>121)
# country_id 122 — South Korea (inferred); note "Gangwon" also exists under 121
State.create(:id=>1517, :name=>"Ulsan",:country_id=>122)
State.create(:id=>1518, :name=>"Daejeon",:country_id=>122)
State.create(:id=>1519, :name=>"Daegu",:country_id=>122)
State.create(:id=>1520, :name=>"Seoul",:country_id=>122)
State.create(:id=>1521, :name=>"Busan",:country_id=>122)
State.create(:id=>1522, :name=>"North Gyeongsang",:country_id=>122)
State.create(:id=>1523, :name=>"Gyeonggi",:country_id=>122)
State.create(:id=>1524, :name=>"Gwangju",:country_id=>122)
State.create(:id=>1525, :name=>"Gangwon",:country_id=>122)
State.create(:id=>1526, :name=>"Incheon",:country_id=>122)
State.create(:id=>1527, :name=>"South Chungcheong",:country_id=>122)
State.create(:id=>1528, :name=>"North Chungcheong",:country_id=>122)
State.create(:id=>1529, :name=>"South Jeolla",:country_id=>122)
State.create(:id=>1530, :name=>"North Jeolla",:country_id=>122)
State.create(:id=>1531, :name=>"Jeju",:country_id=>122)
State.create(:id=>1532, :name=>"South Gyeongsang",:country_id=>122)
# country_id 124 — Kuwait (inferred); 123 not present in this chunk
State.create(:id=>1533, :name=>"Ḩawallī",:country_id=>124)
State.create(:id=>1534, :name=>"Al ‘Āşimah",:country_id=>124)
State.create(:id=>1535, :name=>"Al Jahrāʼ",:country_id=>124)
State.create(:id=>1536, :name=>"Al Farwaniyah",:country_id=>124)
State.create(:id=>1537, :name=>"Al Aḩmadī",:country_id=>124)
State.create(:id=>1538, :name=>"Muḩāfaz̧at Mubārak al Kabīr",:country_id=>124)
# country_id 126 — Kazakhstan (inferred); 125 not present in this chunk
State.create(:id=>1539, :name=>"Batys Qazaqstan",:country_id=>126)
State.create(:id=>1540, :name=>"Mangghystaū",:country_id=>126)
State.create(:id=>1541, :name=>"Atyraū",:country_id=>126)
State.create(:id=>1542, :name=>"Aqtöbe",:country_id=>126)
State.create(:id=>1543, :name=>"East Kazakhstan",:country_id=>126)
State.create(:id=>1544, :name=>"Aqmola",:country_id=>126)
State.create(:id=>1545, :name=>"Soltüstik Qazaqstan",:country_id=>126)
State.create(:id=>1546, :name=>"Pavlodar",:country_id=>126)
State.create(:id=>1547, :name=>"Qyzylorda",:country_id=>126)
State.create(:id=>1548, :name=>"Qostanay",:country_id=>126)
State.create(:id=>1549, :name=>"Qaraghandy",:country_id=>126)
State.create(:id=>1550, :name=>"Zhambyl",:country_id=>126)
State.create(:id=>1551, :name=>"Ongtüstik Qazaqstan",:country_id=>126)
State.create(:id=>1552, :name=>"Almaty Qalasy",:country_id=>126)
State.create(:id=>1553, :name=>"Almaty",:country_id=>126)
State.create(:id=>1554, :name=>"Bayqongyr Qalasy",:country_id=>126)
State.create(:id=>1555, :name=>"Astana Qalasy",:country_id=>126)
# country_id 127 — Laos (inferred)
State.create(:id=>1556, :name=>"Xiangkhoang",:country_id=>127)
State.create(:id=>1557, :name=>"Xiagnabouli",:country_id=>127)
State.create(:id=>1558, :name=>"<NAME>angchan",:country_id=>127)
State.create(:id=>1559, :name=>"Savannahkhét",:country_id=>127)
State.create(:id=>1560, :name=>"Salavan",:country_id=>127)
State.create(:id=>1561, :name=>"Phôngsali",:country_id=>127)
State.create(:id=>1562, :name=>"Oudômxai",:country_id=>127)
State.create(:id=>1563, :name=>"Louangphabang",:country_id=>127)
State.create(:id=>1564, :name=>"Loungnamtha",:country_id=>127)
State.create(:id=>1565, :name=>"Khammouan",:country_id=>127)
State.create(:id=>1566, :name=>"Houaphan",:country_id=>127)
State.create(:id=>1567, :name=>"Champasak",:country_id=>127)
State.create(:id=>1568, :name=>"Attapu",:country_id=>127)
State.create(:id=>1569, :name=>"Xékong",:country_id=>127)
State.create(:id=>1570, :name=>"Bokèo",:country_id=>127)
State.create(:id=>1571, :name=>"Bolikhamxai",:country_id=>127)
State.create(:id=>1572, :name=>"Viangchan",:country_id=>127)
# country_id 128 — Lebanon (inferred)
State.create(:id=>1573, :name=>"Mont-Liban",:country_id=>128)
State.create(:id=>1574, :name=>"Beyrouth",:country_id=>128)
State.create(:id=>1575, :name=>"Liban-Nord",:country_id=>128)
State.create(:id=>1576, :name=>"Liban-Sud",:country_id=>128)
State.create(:id=>1577, :name=>"Béqaa",:country_id=>128)
State.create(:id=>1578, :name=>"Nabatîyé",:country_id=>128)
State.create(:id=>1579, :name=>"Aakkâr",:country_id=>128)
State.create(:id=>1580, :name=>"Baalbek-Hermel",:country_id=>128)
# country_id 129 — Saint Lucia (inferred)
State.create(:id=>1581, :name=>"Vieux-Fort",:country_id=>129)
State.create(:id=>1582, :name=>"Soufrière",:country_id=>129)
State.create(:id=>1583, :name=>"Praslin",:country_id=>129)
State.create(:id=>1584, :name=>"Micoud",:country_id=>129)
State.create(:id=>1585, :name=>"Laborie",:country_id=>129)
State.create(:id=>1586, :name=>"Gros-Islet",:country_id=>129)
State.create(:id=>1587, :name=>"Dennery",:country_id=>129)
State.create(:id=>1588, :name=>"Dauphin",:country_id=>129)
State.create(:id=>1589, :name=>"Choiseul",:country_id=>129)
State.create(:id=>1590, :name=>"Castries",:country_id=>129)
State.create(:id=>1591, :name=>"Anse-la-Raye",:country_id=>129)
# country_id 130 — Liechtenstein (inferred)
State.create(:id=>1592, :name=>"Vaduz",:country_id=>130)
State.create(:id=>1593, :name=>"Triesenberg",:country_id=>130)
State.create(:id=>1594, :name=>"Triesen",:country_id=>130)
State.create(:id=>1595, :name=>"Schellenberg",:country_id=>130)
State.create(:id=>1596, :name=>"Schaan",:country_id=>130)
State.create(:id=>1597, :name=>"Ruggell",:country_id=>130)
State.create(:id=>1598, :name=>"Planken",:country_id=>130)
State.create(:id=>1599, :name=>"Mauren",:country_id=>130)
State.create(:id=>1600, :name=>"Gamprin",:country_id=>130)
State.create(:id=>1601, :name=>"Eschen",:country_id=>130)
State.create(:id=>1602, :name=>"Balzers",:country_id=>130)
# country_id 131 — Sri Lanka (inferred)
State.create(:id=>1603, :name=>"Western",:country_id=>131)
State.create(:id=>1604, :name=>"Uva",:country_id=>131)
State.create(:id=>1605, :name=>"Southern",:country_id=>131)
State.create(:id=>1606, :name=>"Sabaragamuwa",:country_id=>131)
State.create(:id=>1607, :name=>"North Western",:country_id=>131)
State.create(:id=>1608, :name=>"North Eastern",:country_id=>131)
State.create(:id=>1609, :name=>"North Central",:country_id=>131)
State.create(:id=>1610, :name=>"Central",:country_id=>131)
State.create(:id=>1611, :name=>"Northern Province",:country_id=>131)
# country_id 132 — Liberia (inferred)
State.create(:id=>1612, :name=>"Sinoe",:country_id=>132)
State.create(:id=>1613, :name=>"Nimba",:country_id=>132)
State.create(:id=>1614, :name=>"Montserrado",:country_id=>132)
State.create(:id=>1615, :name=>"Maryland",:country_id=>132)
State.create(:id=>1616, :name=>"Lofa",:country_id=>132)
State.create(:id=>1617, :name=>"Grand Gedeh",:country_id=>132)
State.create(:id=>1618, :name=>"Grand Cape Mount",:country_id=>132)
State.create(:id=>1619, :name=>"Grand Bassa",:country_id=>132)
State.create(:id=>1620, :name=>"Bong",:country_id=>132)
State.create(:id=>1621, :name=>"Bomi",:country_id=>132)
State.create(:id=>1622, :name=>"Grand Kru",:country_id=>132)
State.create(:id=>1623, :name=>"Margibi",:country_id=>132)
State.create(:id=>1624, :name=>"River Cess",:country_id=>132)
State.create(:id=>1625, :name=>"Gbarpolu",:country_id=>132)
State.create(:id=>1626, :name=>"<NAME>",:country_id=>132)
# country_id 133 — Lesotho (inferred)
State.create(:id=>1627, :name=>"Thaba-Tseka",:country_id=>133)
State.create(:id=>1628, :name=>"Quthing",:country_id=>133)
State.create(:id=>1629, :name=>"<NAME>",:country_id=>133)
State.create(:id=>1630, :name=>"Mokhotlong",:country_id=>133)
State.create(:id=>1631, :name=>"Mohaleʼ<NAME>",:country_id=>133)
State.create(:id=>1632, :name=>"Maseru",:country_id=>133)
State.create(:id=>1633, :name=>"Mafeteng",:country_id=>133)
State.create(:id=>1634, :name=>"Leribe",:country_id=>133)
State.create(:id=>1635, :name=>"Butha-Buthe",:country_id=>133)
State.create(:id=>1636, :name=>"Berea",:country_id=>133)
# country_id 134 — Lithuania (inferred)
State.create(:id=>1637, :name=>"Alytaus Apskritis",:country_id=>134)
State.create(:id=>1638, :name=>"Kauno Apskritis",:country_id=>134)
State.create(:id=>1639, :name=>"Klaipėdos Apskritis",:country_id=>134)
State.create(:id=>1640, :name=>"Marijampolės Apskritis",:country_id=>134)
State.create(:id=>1641, :name=>"Panevėžio Apskritis",:country_id=>134)
State.create(:id=>1642, :name=>"Šiaulių Apskritis",:country_id=>134)
State.create(:id=>1643, :name=>"Tauragės Apskritis",:country_id=>134)
State.create(:id=>1644, :name=>"Telšių Apskritis",:country_id=>134)
State.create(:id=>1645, :name=>"Utenos Apskritis",:country_id=>134)
State.create(:id=>1646, :name=>"Vilniaus Apskritis",:country_id=>134)
# country_id 135 — Luxembourg (inferred)
State.create(:id=>1647, :name=>"Luxembourg",:country_id=>135)
State.create(:id=>1648, :name=>"Grevenmacher",:country_id=>135)
State.create(:id=>1649, :name=>"Diekirch",:country_id=>135)
# country_id 136 — Latvia (inferred); mixes pre-2009 "Rajons" districts with
# post-2009 "Novads" municipalities — possibly two dataset vintages merged
State.create(:id=>1650, :name=>"<NAME>",:country_id=>136)
State.create(:id=>1651, :name=>"Ventspils",:country_id=>136)
State.create(:id=>1652, :name=>"<NAME>",:country_id=>136)
State.create(:id=>1653, :name=>"<NAME>",:country_id=>136)
State.create(:id=>1654, :name=>"<NAME>",:country_id=>136)
State.create(:id=>1655, :name=>"<NAME>",:country_id=>136)
State.create(:id=>1656, :name=>"<NAME>",:country_id=>136)
State.create(:id=>1657, :name=>"Rīgas Rajons",:country_id=>136)
State.create(:id=>1658, :name=>"Rīga",:country_id=>136)
State.create(:id=>1659, :name=>"Rēzeknes Rajons",:country_id=>136)
State.create(:id=>1660, :name=>"Rēzekne",:country_id=>136)
State.create(:id=>1661, :name=>"Preiļu Rajons",:country_id=>136)
State.create(:id=>1662, :name=>"Ogres Rajons",:country_id=>136)
State.create(:id=>1663, :name=>"Madonas Rajons",:country_id=>136)
State.create(:id=>1664, :name=>"Ludzas Rajons",:country_id=>136)
State.create(:id=>1665, :name=>"Limbažu Rajons",:country_id=>136)
State.create(:id=>1666, :name=>"Liepājas Rajons",:country_id=>136)
State.create(:id=>1667, :name=>"Liepāja",:country_id=>136)
State.create(:id=>1668, :name=>"Kuldīgas Rajons",:country_id=>136)
State.create(:id=>1669, :name=>"Krāslavas Rajons",:country_id=>136)
State.create(:id=>1670, :name=>"Jūrmala",:country_id=>136)
State.create(:id=>1671, :name=>"Jelgavas Rajons",:country_id=>136)
State.create(:id=>1672, :name=>"Jelgava",:country_id=>136)
State.create(:id=>1673, :name=>"Jēkabpils Rajons",:country_id=>136)
State.create(:id=>1674, :name=>"Gulbenes Rajons",:country_id=>136)
State.create(:id=>1675, :name=>"<NAME>",:country_id=>136)
State.create(:id=>1676, :name=>"<NAME>ons",:country_id=>136)
State.create(:id=>1677, :name=>"Daugavpils",:country_id=>136)
State.create(:id=>1678, :name=>"Cēsu Rajons",:country_id=>136)
State.create(:id=>1679, :name=>"Bauskas Rajons",:country_id=>136)
State.create(:id=>1680, :name=>"<NAME>",:country_id=>136)
State.create(:id=>1681, :name=>"Alūksnes Rajons",:country_id=>136)
State.create(:id=>1682, :name=>"Aizkraukles Rajons",:country_id=>136)
State.create(:id=>1683, :name=>"Dundagas Novads",:country_id=>136)
State.create(:id=>1684, :name=>"Alsungas Novads",:country_id=>136)
State.create(:id=>1685, :name=>"Pāvilostas Novads",:country_id=>136)
State.create(:id=>1686, :name=>"Nīcas Novads",:country_id=>136)
State.create(:id=>1687, :name=>"Rucavas Novads",:country_id=>136)
State.create(:id=>1688, :name=>"Grobiņas Novads",:country_id=>136)
State.create(:id=>1689, :name=>"Durbes Novads",:country_id=>136)
State.create(:id=>1690, :name=>"Aizputes Novads",:country_id=>136)
State.create(:id=>1691, :name=>"Priekules Novads",:country_id=>136)
State.create(:id=>1692, :name=>"Vaiņodes Novads",:country_id=>136)
State.create(:id=>1693, :name=>"Skrundas Novads",:country_id=>136)
State.create(:id=>1694, :name=>"Brocēnu Novads",:country_id=>136)
State.create(:id=>1695, :name=>"Rojas Novads",:country_id=>136)
State.create(:id=>1696, :name=>"Kandavas Novads",:country_id=>136)
State.create(:id=>1697, :name=>"Auces Novads",:country_id=>136)
State.create(:id=>1698, :name=>"Jaunpils Novads",:country_id=>136)
State.create(:id=>1699, :name=>"Engures Novads",:country_id=>136)
State.create(:id=>1700, :name=>"Tērvetes Novads",:country_id=>136)
State.create(:id=>1701, :name=>"Ozolnieku Novads",:country_id=>136)
State.create(:id=>1702, :name=>"Rundāles Novads",:country_id=>136)
State.create(:id=>1703, :name=>"Babītes Novads",:country_id=>136)
State.create(:id=>1704, :name=>"Mārupes Novads",:country_id=>136)
State.create(:id=>1705, :name=>"Olaines Novads",:country_id=>136)
State.create(:id=>1706, :name=>"Iecavas Novads",:country_id=>136)
State.create(:id=>1707, :name=>"Ķekavas Novads",:country_id=>136)
State.create(:id=>1708, :name=>"Salaspils Novads",:country_id=>136)
State.create(:id=>1709, :name=>"Baldones Novads",:country_id=>136)
State.create(:id=>1710, :name=>"Stopiņu Novads",:country_id=>136)
State.create(:id=>1711, :name=>"Carnikavas Novads",:country_id=>136)
State.create(:id=>1712, :name=>"Ādažu Novads",:country_id=>136)
State.create(:id=>1713, :name=>"Garkalnes Novads",:country_id=>136)
State.create(:id=>1714, :name=>"Vecumnieku Novads",:country_id=>136)
State.create(:id=>1715, :name=>"Ķeguma Novads",:country_id=>136)
State.create(:id=>1716, :name=>"Lielvārdes Novads",:country_id=>136)
State.create(:id=>1717, :name=>"Skrīveru Novads",:country_id=>136)
State.create(:id=>1718, :name=>"Jaunjelgavas Novads",:country_id=>136)
State.create(:id=>1719, :name=>"Neretas Novads",:country_id=>136)
State.create(:id=>1720, :name=>"Viesītes Novads",:country_id=>136)
State.create(:id=>1721, :name=>"Salas Novads",:country_id=>136)
State.create(:id=>1722, :name=>"Jēkabpils",:country_id=>136)
State.create(:id=>1723, :name=>"Aknīstes Novads",:country_id=>136)
State.create(:id=>1724, :name=>"Ilūkstes Novads",:country_id=>136)
State.create(:id=>1725, :name=>"Vārkavas Novads",:country_id=>136)
State.create(:id=>1726, :name=>"Līvānu Novads",:country_id=>136)
State.create(:id=>1727, :name=>"Varakļānu Novads",:country_id=>136)
State.create(:id=>1728, :name=>"Viļānu Novads",:country_id=>136)
State.create(:id=>1729, :name=>"Riebiņu Novads",:country_id=>136)
State.create(:id=>1730, :name=>"Aglonas Novads",:country_id=>136)
State.create(:id=>1731, :name=>"Ciblas Novads",:country_id=>136)
State.create(:id=>1732, :name=>"Zilupes Novads",:country_id=>136)
State.create(:id=>1733, :name=>"Viļakas Novads",:country_id=>136)
State.create(:id=>1734, :name=>"Baltinavas Novads",:country_id=>136)
State.create(:id=>1735, :name=>"Dagdas Novads",:country_id=>136)
State.create(:id=>1736, :name=>"Kārsavas Novads",:country_id=>136)
State.create(:id=>1737, :name=>"Rugāju Novads",:country_id=>136)
State.create(:id=>1738, :name=>"Cesvaines Novads",:country_id=>136)
State.create(:id=>1739, :name=>"Lubānas Novads",:country_id=>136)
State.create(:id=>1740, :name=>"Krustpils Novads",:country_id=>136)
State.create(:id=>1741, :name=>"Pļaviņu Novads",:country_id=>136)
State.create(:id=>1742, :name=>"Kokneses Novads",:country_id=>136)
State.create(:id=>1743, :name=>"Ikšķiles Novads",:country_id=>136)
State.create(:id=>1744, :name=>"Ropažu Novads",:country_id=>136)
State.create(:id=>1745, :name=>"Inčukalna Novads",:country_id=>136)
State.create(:id=>1746, :name=>"Krimuldas Novads",:country_id=>136)
State.create(:id=>1747, :name=>"Siguldas Novads",:country_id=>136)
State.create(:id=>1748, :name=>"Līgatnes Novads",:country_id=>136)
State.create(:id=>1749, :name=>"Mālpils Novads",:country_id=>136)
State.create(:id=>1750, :name=>"Sējas Novads",:country_id=>136)
State.create(:id=>1751, :name=>"Saulkrastu Novads",:country_id=>136)
State.create(:id=>1752, :name=>"Salacgrīvas Novads",:country_id=>136)
State.create(:id=>1753, :name=>"Alojas Novads",:country_id=>136)
State.create(:id=>1754, :name=>"Naukšēnu Novads",:country_id=>136)
State.create(:id=>1755, :name=>"Rūjienas Novads",:country_id=>136)
State.create(:id=>1756, :name=>"Mazsalacas Novads",:country_id=>136)
State.create(:id=>1757, :name=>"<NAME>",:country_id=>136)
State.create(:id=>1758, :name=>"Pārgaujas Novads",:country_id=>136)
State.create(:id=>1759, :name=>"Kocēnu Novads",:country_id=>136)
State.create(:id=>1760, :name=>"Amatas Novads",:country_id=>136)
State.create(:id=>1761, :name=>"Priekuļu Novads",:country_id=>136)
State.create(:id=>1762, :name=>"Raunas Novads",:country_id=>136)
State.create(:id=>1763, :name=>"Strenču Novads",:country_id=>136)
State.create(:id=>1764, :name=>"<NAME>",:country_id=>136)
State.create(:id=>1765, :name=>"<NAME>",:country_id=>136)
State.create(:id=>1766, :name=>"Jaunpiebalgas Novads",:country_id=>136)
State.create(:id=>1767, :name=>"Ērgļu Novads",:country_id=>136)
State.create(:id=>1768, :name=>"<NAME>",:country_id=>136)
State.create(:id=>1769, :name=>"<NAME>",:country_id=>136)
# country_id 137 — Libya (inferred)
State.create(:id=>1770, :name=>"Darnah",:country_id=>137)
State.create(:id=>1771, :name=>"Banghāzī",:country_id=>137)
State.create(:id=>1772, :name=>"Al Marj",:country_id=>137)
State.create(:id=>1773, :name=>"Al Kufrah",:country_id=>137)
State.create(:id=>1774, :name=>"Al Jabal al Akhḑar",:country_id=>137)
State.create(:id=>1775, :name=>"Ţarābulus",:country_id=>137)
State.create(:id=>1776, :name=>"Surt",:country_id=>137)
State.create(:id=>1777, :name=>"Sabhā",:country_id=>137)
State.create(:id=>1778, :name=>"Nālūt",:country_id=>137)
State.create(:id=>1779, :name=>"Murzuq",:country_id=>137)
State.create(:id=>1780, :name=>"Mişrātah",:country_id=>137)
State.create(:id=>1781, :name=>"Ghāt",:country_id=>137)
State.create(:id=>1782, :name=>"Az Zāwiyah",:country_id=>137)
State.create(:id=>1783, :name=>"Ash Shāţiʼ",:country_id=>137)
State.create(:id=>1784, :name=>"Al Jufrah",:country_id=>137)
State.create(:id=>1785, :name=>"An Nuqāţ al Khams",:country_id=>137)
State.create(:id=>1786, :name=>"Sha‘bīyat al Buţnān",:country_id=>137)
State.create(:id=>1787, :name=>"Sha‘bīyat al Jabal al Gharbī",:country_id=>137)
State.create(:id=>1788, :name=>"Sha‘bīyat al Jafārah",:country_id=>137)
State.create(:id=>1789, :name=>"Sha‘bīyat al Marqab",:country_id=>137)
State.create(:id=>1790, :name=>"<NAME>",:country_id=>137)
State.create(:id=>1791, :name=>"<NAME>",:country_id=>137)
# country_id 138 — Morocco (inferred; pre-2015 region names)
State.create(:id=>1792, :name=>"Rabat-Salé-Zemmour-Zaër",:country_id=>138)
State.create(:id=>1793, :name=>"Meknès-Tafilalet",:country_id=>138)
State.create(:id=>1794, :name=>"Marrakech-Tensift-<NAME>",:country_id=>138)
State.create(:id=>1795, :name=>"Fès-Boulemane",:country_id=>138)
State.create(:id=>1796, :name=>"Grand Casablanca",:country_id=>138)
State.create(:id=>1797, :name=>"Chaouia-Ouardigha",:country_id=>138)
State.create(:id=>1798, :name=>"Doukkala-Abda",:country_id=>138)
State.create(:id=>1799, :name=>"Gharb-Chrarda-<NAME>",:country_id=>138)
State.create(:id=>1800, :name=>"Guelmim-Es Smara",:country_id=>138)
State.create(:id=>1801, :name=>"Oriental",:country_id=>138)
State.create(:id=>1802, :name=>"Souss-Massa-Drâa",:country_id=>138)
State.create(:id=>1803, :name=>"Tadla-Azilal",:country_id=>138)
State.create(:id=>1804, :name=>"Tanger-Tétouan",:country_id=>138)
State.create(:id=>1805, :name=>"Taza-Al Hoceima-Taounate",:country_id=>138)
State.create(:id=>1806, :name=>"Laâyoune-Boujdour-S<NAME>",:country_id=>138)
State.create(:id=>1807, :name=>"<NAME>",:country_id=>138)
# country_id 140 — Moldova (inferred); 139 not present in this chunk
State.create(:id=>1808, :name=>"<NAME>",:country_id=>140)
State.create(:id=>1809, :name=>"<NAME>",:country_id=>140)
State.create(:id=>1810, :name=>"<NAME>",:country_id=>140)
State.create(:id=>1811, :name=>"<NAME>",:country_id=>140)
State.create(:id=>1812, :name=>"Ştefan-Vodă",:country_id=>140)
State.create(:id=>1813, :name=>"<NAME>",:country_id=>140)
State.create(:id=>1814, :name=>"<NAME>",:country_id=>140)
State.create(:id=>1815, :name=>"<NAME>",:country_id=>140)
State.create(:id=>1816, :name=>"<NAME>",:country_id=>140)
State.create(:id=>1817, :name=>"<NAME>",:country_id=>140)
State.create(:id=>1818, :name=>"<NAME>",:country_id=>140)
State.create(:id=>1819, :name=>"<NAME>",:country_id=>140)
State.create(:id=>1820, :name=>"<NAME>",:country_id=>140)
State.create(:id=>1821, :name=>"<NAME>",:country_id=>140)
State.create(:id=>1822, :name=>"<NAME>",:country_id=>140)
State.create(:id=>1823, :name=>"<NAME>",:country_id=>140)
State.create(:id=>1824, :name=>"<NAME>",:country_id=>140)
State.create(:id=>1825, :name=>"Chişinău",:country_id=>140)
State.create(:id=>1826, :name=>"Căuşeni",:country_id=>140)
State.create(:id=>1827, :name=>"<NAME>",:country_id=>140)
State.create(:id=>1828, :name=>"Călăraşi",:country_id=>140)
State.create(:id=>1829, :name=>"<NAME>",:country_id=>140)
State.create(:id=>1830, :name=>"<NAME>",:country_id=>140)
State.create(:id=>1831, :name=>"<NAME>",:country_id=>140)
State.create(:id=>1832, :name=>"<NAME>",:country_id=>140)
State.create(:id=>1833, :name=>"Dubăsari",:country_id=>140)
State.create(:id=>1834, :name=>"<NAME>",:country_id=>140)
State.create(:id=>1835, :name=>"<NAME>",:country_id=>140)
State.create(:id=>1836, :name=>"<NAME>",:country_id=>140)
State.create(:id=>1837, :name=>"<NAME>",:country_id=>140)
State.create(:id=>1838, :name=>"<NAME>",:country_id=>140)
State.create(:id=>1839, :name=>"<NAME>",:country_id=>140)
State.create(:id=>1840, :name=>"<NAME>",:country_id=>140)
State.create(:id=>1841, :name=>"<NAME>",:country_id=>140)
State.create(:id=>1842, :name=>"Găgăuzia",:country_id=>140)
State.create(:id=>1843, :name=>"Bender",:country_id=>140)
State.create(:id=>1844, :name=>"Bălţi",:country_id=>140)
# country_id 141 — Montenegro (inferred; "Opština ..." = municipality)
State.create(:id=>1845, :name=>"<NAME>",:country_id=>141)
State.create(:id=>1846, :name=>"<NAME>",:country_id=>141)
State.create(:id=>1847, :name=>"<NAME>",:country_id=>141)
State.create(:id=>1848, :name=>"<NAME>",:country_id=>141)
State.create(:id=>1849, :name=>"<NAME>",:country_id=>141)
State.create(:id=>1850, :name=>"<NAME>",:country_id=>141)
State.create(:id=>1851, :name=>"Opština Plužine",:country_id=>141)
State.create(:id=>1852, :name=>"<NAME>",:country_id=>141)
State.create(:id=>1853, :name=>"<NAME>",:country_id=>141)
State.create(:id=>1854, :name=>"<NAME>",:country_id=>141)
State.create(:id=>1855, :name=>"<NAME>",:country_id=>141)
State.create(:id=>1856, :name=>"<NAME>",:country_id=>141)
State.create(:id=>1857, :name=>"<NAME>",:country_id=>141)
State.create(:id=>1858, :name=>"<NAME>",:country_id=>141)
State.create(:id=>1859, :name=>"<NAME>",:country_id=>141)
State.create(:id=>1860, :name=>"<NAME>",:country_id=>141)
State.create(:id=>1861, :name=>"<NAME>",:country_id=>141)
State.create(:id=>1862, :name=>"<NAME>",:country_id=>141)
State.create(:id=>1863, :name=>"<NAME>",:country_id=>141)
State.create(:id=>1864, :name=>"<NAME>",:country_id=>141)
State.create(:id=>1865, :name=>"<NAME>",:country_id=>141)
# country_id 143 — Madagascar (inferred); 142 not present in this chunk
State.create(:id=>1866, :name=>"Diana",:country_id=>143)
State.create(:id=>1867, :name=>"Sava",:country_id=>143)
State.create(:id=>1868, :name=>"Sofia",:country_id=>143)
State.create(:id=>1869, :name=>"Analanjirofo",:country_id=>143)
State.create(:id=>1870, :name=>"Boeny",:country_id=>143)
State.create(:id=>1871, :name=>"Betsiboka",:country_id=>143)
State.create(:id=>1872, :name=>"Alaotra-Mangoro",:country_id=>143)
State.create(:id=>1873, :name=>"Melaky",:country_id=>143)
State.create(:id=>1874, :name=>"Bongolava",:country_id=>143)
State.create(:id=>1875, :name=>"Vakinankaratra",:country_id=>143)
State.create(:id=>1876, :name=>"Itasy",:country_id=>143)
State.create(:id=>1877, :name=>"Analamanga",:country_id=>143)
State.create(:id=>1878, :name=>"East",:country_id=>143)
State.create(:id=>1879, :name=>"Menabe",:country_id=>143)
State.create(:id=>1880, :name=>"<NAME>",:country_id=>143)
State.create(:id=>1881, :name=>"<NAME>",:country_id=>143)
State.create(:id=>1882, :name=>"Vatovavy-Fitovinany",:country_id=>143)
State.create(:id=>1883, :name=>"Ihorombe",:country_id=>143)
State.create(:id=>1884, :name=>"South-East",:country_id=>143)
State.create(:id=>1885, :name=>"Anosy",:country_id=>143)
State.create(:id=>1886, :name=>"Androy",:country_id=>143)
State.create(:id=>1887, :name=>"South-West",:country_id=>143)
# country_id 144 — group continues past this chunk
State.create(:id=>1888, :name=>"<NAME>",:country_id=>144)
State.create(:id=>1889, :name=>"<NAME>",:country_id=>144)
State.create(:id=>1890, :name=>"<NAME>",:country_id=>144)
State.create(:id=>1891, :name=>"<NAME>",:country_id=>144)
State.create(:id=>1892, :name=>"<NAME>",:country_id=>144)
State.create(:id=>1893, :name=>"<NAME>",:country_id=>144)
State.create(:id=>1894, :name=>"<NAME>",:country_id=>144)
State.create(:id=>1895, :name=>"<NAME>",:country_id=>144)
State.create(:id=>1896, :name=>"<NAME>",:country_id=>144)
State.create(:id=>1897, :name=>"<NAME>",:country_id=>144)
State.create(:id=>1898, :name=>"<NAME>",:country_id=>144)
State.create(:id=>1899, :name=>"<NAME>",:country_id=>144)
State.create(:id=>1900, :name=>"<NAME>",:country_id=>144)
State.create(:id=>1901, :name=>"<NAME>",:country_id=>144)
State.create(:id=>1902, :name=>"<NAME>",:country_id=>144)
State.create(:id=>1903, :name=>"<NAME>",:country_id=>144)
State.create(:id=>1904, :name=>"<NAME>",:country_id=>144)
State.create(:id=>1905, :name=>"<NAME>",:country_id=>144)
State.create(:id=>1906, :name=>"<NAME>",:country_id=>144)
State.create(:id=>1907, :name=>"<NAME>",:country_id=>144)
State.create(:id=>1908, :name=>"<NAME>",:country_id=>144)
State.create(:id=>1909, :name=>"<NAME>",:country_id=>144)
State.create(:id=>1910, :name=>"<NAME>",:country_id=>144)
State.create(:id=>1911, :name=>"<NAME>",:country_id=>144)
State.create(:id=>1912, :name=>"Ujelang",:country_id=>144)
State.create(:id=>1913, :name=>"<NAME>",:country_id=>144)
State.create(:id=>1914, :name=>"<NAME>",:country_id=>144)
State.create(:id=>1915, :name=>"<NAME>",:country_id=>144)
State.create(:id=>1916, :name=>"Jabat Island",:country_id=>144)
State.create(:id=>1917, :name=>"Jemo Island",:country_id=>144)
State.create(:id=>1918, :name=>"Kili Island",:country_id=>144)
State.create(:id=>1919, :name=>"Lib Island",:country_id=>144)
State.create(:id=>1920, :name=>"Mejit Island",:country_id=>144)
State.create(:id=>1921, :name=>"Valandovo",:country_id=>145)
State.create(:id=>1922, :name=>"Kratovo",:country_id=>145)
State.create(:id=>1923, :name=>"Pehčevo",:country_id=>145)
State.create(:id=>1924, :name=>"<NAME>",:country_id=>145)
State.create(:id=>1925, :name=>"Bosilovo",:country_id=>145)
State.create(:id=>1926, :name=>"Vasilevo",:country_id=>145)
State.create(:id=>1927, :name=>"Dojran",:country_id=>145)
State.create(:id=>1928, :name=>"Bogdanci",:country_id=>145)
State.create(:id=>1929, :name=>"Konče",:country_id=>145)
State.create(:id=>1930, :name=>"<NAME>",:country_id=>145)
State.create(:id=>1931, :name=>"Zrnovci",:country_id=>145)
State.create(:id=>1932, :name=>"Karbinci",:country_id=>145)
State.create(:id=>1933, :name=>"<NAME>",:country_id=>145)
State.create(:id=>1934, :name=>"Rosoman",:country_id=>145)
State.create(:id=>1935, :name=>"Gradsko",:country_id=>145)
State.create(:id=>1936, :name=>"Lozovo",:country_id=>145)
State.create(:id=>1937, :name=>"Češinovo",:country_id=>145)
State.create(:id=>1938, :name=>"Novaci",:country_id=>145)
State.create(:id=>1939, :name=>"Berovo",:country_id=>145)
State.create(:id=>1940, :name=>"Bitola",:country_id=>145)
State.create(:id=>1941, :name=>"Mogila",:country_id=>145)
State.create(:id=>1942, :name=>"Aračinovo",:country_id=>145)
State.create(:id=>1943, :name=>"Bogovinje",:country_id=>145)
State.create(:id=>1944, :name=>"Brvenica",:country_id=>145)
State.create(:id=>1945, :name=>"Čair",:country_id=>145)
State.create(:id=>1946, :name=>"Čaška",:country_id=>145)
State.create(:id=>1947, :name=>"Centar",:country_id=>145)
State.create(:id=>1948, :name=>"Centar Župa",:country_id=>145)
State.create(:id=>1949, :name=>"Čučer-Sandevo",:country_id=>145)
State.create(:id=>1950, :name=>"Debar",:country_id=>145)
State.create(:id=>1951, :name=>"Delčevo",:country_id=>145)
State.create(:id=>1952, :name=>"<NAME>",:country_id=>145)
State.create(:id=>1953, :name=>"<NAME>",:country_id=>145)
State.create(:id=>1954, :name=>"Drugovo",:country_id=>145)
State.create(:id=>1955, :name=>"<NAME>",:country_id=>145)
State.create(:id=>1956, :name=>"Gevgelija",:country_id=>145)
State.create(:id=>1957, :name=>"Gostivar",:country_id=>145)
State.create(:id=>1958, :name=>"Ilinden",:country_id=>145)
State.create(:id=>1959, :name=>"Jegunovce",:country_id=>145)
State.create(:id=>1960, :name=>"Karpoš",:country_id=>145)
State.create(:id=>1961, :name=>"Kavadarci",:country_id=>145)
State.create(:id=>1962, :name=>"Kičevo",:country_id=>145)
State.create(:id=>1963, :name=>"<NAME>",:country_id=>145)
State.create(:id=>1964, :name=>"Kočani",:country_id=>145)
State.create(:id=>1965, :name=>"<NAME>",:country_id=>145)
State.create(:id=>1966, :name=>"Krivogaštani",:country_id=>145)
State.create(:id=>1967, :name=>"Kruševo",:country_id=>145)
State.create(:id=>1968, :name=>"Kumanovo",:country_id=>145)
State.create(:id=>1969, :name=>"<NAME>",:country_id=>145)
State.create(:id=>1970, :name=>"<NAME>",:country_id=>145)
State.create(:id=>1971, :name=>"Negotino",:country_id=>145)
State.create(:id=>1972, :name=>"Ohrid",:country_id=>145)
State.create(:id=>1973, :name=>"Oslomej",:country_id=>145)
State.create(:id=>1974, :name=>"Petrovec",:country_id=>145)
State.create(:id=>1975, :name=>"Plasnica",:country_id=>145)
State.create(:id=>1976, :name=>"Prilep",:country_id=>145)
State.create(:id=>1977, :name=>"Probištip",:country_id=>145)
State.create(:id=>1978, :name=>"Radoviš",:country_id=>145)
State.create(:id=>1979, :name=>"<NAME>",:country_id=>145)
State.create(:id=>1980, :name=>"<NAME>",:country_id=>145)
State.create(:id=>1981, :name=>"Saraj",:country_id=>145)
State.create(:id=>1982, :name=>"Sopište",:country_id=>145)
State.create(:id=>1983, :name=>"<NAME>",:country_id=>145)
State.create(:id=>1984, :name=>"Štip",:country_id=>145)
State.create(:id=>1985, :name=>"Struga",:country_id=>145)
State.create(:id=>1986, :name=>"Strumica",:country_id=>145)
State.create(:id=>1987, :name=>"Studeničani",:country_id=>145)
State.create(:id=>1988, :name=>"<NAME>",:country_id=>145)
State.create(:id=>1989, :name=>"<NAME>",:country_id=>145)
State.create(:id=>1990, :name=>"Tearce",:country_id=>145)
State.create(:id=>1991, :name=>"Tetovo",:country_id=>145)
State.create(:id=>1992, :name=>"Veles",:country_id=>145)
State.create(:id=>1993, :name=>"Vevčani",:country_id=>145)
State.create(:id=>1994, :name=>"Vinica",:country_id=>145)
State.create(:id=>1995, :name=>"Vraneštica",:country_id=>145)
State.create(:id=>1996, :name=>"Vrapčište",:country_id=>145)
State.create(:id=>1997, :name=>"Zajas",:country_id=>145)
State.create(:id=>1998, :name=>"Zelenikovo",:country_id=>145)
State.create(:id=>1999, :name=>"Želino",:country_id=>145)
State.create(:id=>2000, :name=>"<NAME>",:country_id=>145)
State.create(:id=>2001, :name=>"<NAME>",:country_id=>145)
State.create(:id=>2002, :name=>"<NAME>",:country_id=>145)
State.create(:id=>2003, :name=>"Tombouctou",:country_id=>146)
State.create(:id=>2004, :name=>"Sikasso",:country_id=>146)
State.create(:id=>2005, :name=>"Ségou",:country_id=>146)
State.create(:id=>2006, :name=>"Mopti",:country_id=>146)
State.create(:id=>2007, :name=>"Koulikoro",:country_id=>146)
State.create(:id=>2008, :name=>"Kayes",:country_id=>146)
State.create(:id=>2009, :name=>"Gao",:country_id=>146)
State.create(:id=>2010, :name=>"Bamako",:country_id=>146)
State.create(:id=>2011, :name=>"Kidal",:country_id=>146)
State.create(:id=>2012, :name=>"Tanintharyi",:country_id=>147)
State.create(:id=>2013, :name=>"Shan",:country_id=>147)
State.create(:id=>2014, :name=>"Sagain",:country_id=>147)
State.create(:id=>2015, :name=>"Yangon",:country_id=>147)
State.create(:id=>2016, :name=>"Rakhine",:country_id=>147)
State.create(:id=>2017, :name=>"Bago",:country_id=>147)
State.create(:id=>2018, :name=>"Mon",:country_id=>147)
State.create(:id=>2019, :name=>"Mandalay",:country_id=>147)
State.create(:id=>2020, :name=>"Magway",:country_id=>147)
State.create(:id=>2021, :name=>"Kayah",:country_id=>147)
State.create(:id=>2022, :name=>"Kayin",:country_id=>147)
State.create(:id=>2023, :name=>"Kachin",:country_id=>147)
State.create(:id=>2024, :name=>"Ayeyarwady",:country_id=>147)
State.create(:id=>2025, :name=>"Chin",:country_id=>147)
State.create(:id=>2026, :name=>"Uvs",:country_id=>148)
State.create(:id=>2027, :name=>"Hovd",:country_id=>148)
State.create(:id=>2028, :name=>"Govĭ-Altay",:country_id=>148)
State.create(:id=>2029, :name=>"Dzavhan",:country_id=>148)
State.create(:id=>2030, :name=>"Bayan-Ölgiy",:country_id=>148)
State.create(:id=>2031, :name=>"Bayanhongor",:country_id=>148)
State.create(:id=>2032, :name=>"Ulaanbaatar",:country_id=>148)
State.create(:id=>2033, :name=>"Central Aymag",:country_id=>148)
State.create(:id=>2034, :name=>"Sühbaatar",:country_id=>148)
State.create(:id=>2035, :name=>"Selenge",:country_id=>148)
State.create(:id=>2036, :name=>"Övörhangay",:country_id=>148)
State.create(:id=>2037, :name=>"South Govĭ",:country_id=>148)
State.create(:id=>2038, :name=>"Hövsgöl",:country_id=>148)
State.create(:id=>2039, :name=>"Hentiy",:country_id=>148)
State.create(:id=>2040, :name=>"Middle Govĭ",:country_id=>148)
State.create(:id=>2041, :name=>"East Gobi Aymag",:country_id=>148)
State.create(:id=>2042, :name=>"Eastern",:country_id=>148)
State.create(:id=>2043, :name=>"Bulgan",:country_id=>148)
State.create(:id=>2044, :name=>"Arhangay",:country_id=>148)
State.create(:id=>2045, :name=>"Darhan Uul",:country_id=>148)
State.create(:id=>2046, :name=>"Govĭ-Sumber",:country_id=>148)
State.create(:id=>2047, :name=>"Orhon",:country_id=>148)
State.create(:id=>2048, :name=>"Macau",:country_id=>149)
State.create(:id=>2049, :name=>"Ilhas",:country_id=>149)
State.create(:id=>2050, :name=>"Rota Municipality",:country_id=>150)
State.create(:id=>2051, :name=>"Saipan Municipality",:country_id=>150)
State.create(:id=>2052, :name=>"Tinian Municipality",:country_id=>150)
State.create(:id=>2053, :name=>"Northern Islands Municipality",:country_id=>150)
State.create(:id=>2054, :name=>"Martinique",:country_id=>151)
State.create(:id=>2055, :name=>"Trarza",:country_id=>152)
State.create(:id=>2056, :name=>"<NAME>",:country_id=>152)
State.create(:id=>2057, :name=>"Tagant",:country_id=>152)
State.create(:id=>2058, :name=>"Nouakchott",:country_id=>152)
State.create(:id=>2059, :name=>"Inchiri",:country_id=>152)
State.create(:id=>2060, :name=>"<NAME>",:country_id=>152)
State.create(:id=>2061, :name=>"<NAME>",:country_id=>152)
State.create(:id=>2062, :name=>"Guidimaka",:country_id=>152)
State.create(:id=>2063, :name=>"Gorgol",:country_id=>152)
State.create(:id=>2064, :name=>"<NAME>",:country_id=>152)
State.create(:id=>2065, :name=>"Brakna",:country_id=>152)
State.create(:id=>2066, :name=>"Assaba",:country_id=>152)
State.create(:id=>2067, :name=>"Adrar",:country_id=>152)
State.create(:id=>2068, :name=>"<NAME>",:country_id=>153)
State.create(:id=>2069, :name=>"<NAME>",:country_id=>153)
State.create(:id=>2070, :name=>"<NAME>",:country_id=>153)
State.create(:id=>2071, :name=>"<NAME>",:country_id=>155)
State.create(:id=>2072, :name=>"Savanne",:country_id=>155)
State.create(:id=>2073, :name=>"<NAME>",:country_id=>155)
State.create(:id=>2074, :name=>"<NAME>",:country_id=>155)
State.create(:id=>2075, :name=>"<NAME>",:country_id=>155)
State.create(:id=>2076, :name=>"Pamplemousses",:country_id=>155)
State.create(:id=>2077, :name=>"Moka",:country_id=>155)
State.create(:id=>2078, :name=>"Grand Port",:country_id=>155)
State.create(:id=>2079, :name=>"Flacq",:country_id=>155)
State.create(:id=>2080, :name=>"Black River",:country_id=>155)
State.create(:id=>2081, :name=>"<NAME>",:country_id=>155)
State.create(:id=>2082, :name=>"Rodrigues",:country_id=>155)
State.create(:id=>2083, :name=>"<NAME>",:country_id=>156)
State.create(:id=>2084, :name=>"<NAME>",:country_id=>156)
State.create(:id=>2085, :name=>"<NAME>",:country_id=>156)
State.create(:id=>2086, :name=>"Seenu",:country_id=>156)
State.create(:id=>2087, :name=>"<NAME>",:country_id=>156)
State.create(:id=>2088, :name=>"<NAME>",:country_id=>156)
State.create(:id=>2089, :name=>"<NAME>",:country_id=>156)
State.create(:id=>2090, :name=>"<NAME>",:country_id=>156)
State.create(:id=>2091, :name=>"<NAME>",:country_id=>156)
State.create(:id=>2092, :name=>"Laamu",:country_id=>156)
State.create(:id=>2093, :name=>"<NAME>",:country_id=>156)
State.create(:id=>2094, :name=>"<NAME>",:country_id=>156)
State.create(:id=>2095, :name=>"<NAME>",:country_id=>156)
State.create(:id=>2096, :name=>"<NAME>",:country_id=>156)
State.create(:id=>2097, :name=>"Gaafu Alifu Atholhu",:country_id=>156)
State.create(:id=>2098, :name=>"Faafu Atholhu",:country_id=>156)
State.create(:id=>2099, :name=>"Dhaalu Atholhu",:country_id=>156)
State.create(:id=>2100, :name=>"Baa Atholhu",:country_id=>156)
State.create(:id=>2101, :name=>"Alifu Atholhu",:country_id=>156)
State.create(:id=>2102, :name=>"Maale",:country_id=>156)
State.create(:id=>2103, :name=>"Southern Region",:country_id=>157)
State.create(:id=>2104, :name=>"Northern Region",:country_id=>157)
State.create(:id=>2105, :name=>"Central Region",:country_id=>157)
State.create(:id=>2106, :name=>"Yucatán",:country_id=>158)
State.create(:id=>2107, :name=>"Veracruz-Llave",:country_id=>158)
State.create(:id=>2108, :name=>"Tlaxcala",:country_id=>158)
State.create(:id=>2109, :name=>"Tamaulipas",:country_id=>158)
State.create(:id=>2110, :name=>"Tabasco",:country_id=>158)
State.create(:id=>2111, :name=>"Quintana Roo",:country_id=>158)
State.create(:id=>2112, :name=>"Querétaro",:country_id=>158)
State.create(:id=>2113, :name=>"Puebla",:country_id=>158)
State.create(:id=>2114, :name=>"Oaxaca",:country_id=>158)
State.create(:id=>2115, :name=>"<NAME>",:country_id=>158)
State.create(:id=>2116, :name=>"Morelos",:country_id=>158)
State.create(:id=>2117, :name=>"México",:country_id=>158)
State.create(:id=>2118, :name=>"Hidalgo",:country_id=>158)
State.create(:id=>2119, :name=>"Guerrero",:country_id=>158)
State.create(:id=>2120, :name=>"The Federal District",:country_id=>158)
State.create(:id=>2121, :name=>"Chiapas",:country_id=>158)
State.create(:id=>2122, :name=>"Campeche",:country_id=>158)
State.create(:id=>2123, :name=>"Zacatecas",:country_id=>158)
State.create(:id=>2124, :name=>"Sonora",:country_id=>158)
State.create(:id=>2125, :name=>"Sinaloa",:country_id=>158)
State.create(:id=>2126, :name=>"<NAME>",:country_id=>158)
State.create(:id=>2127, :name=>"Nayarit",:country_id=>158)
State.create(:id=>2128, :name=>"Michoacán",:country_id=>158)
State.create(:id=>2129, :name=>"Jalisco",:country_id=>158)
State.create(:id=>2130, :name=>"Guanajuato",:country_id=>158)
State.create(:id=>2131, :name=>"Durango",:country_id=>158)
State.create(:id=>2132, :name=>"Colima",:country_id=>158)
State.create(:id=>2133, :name=>"Coahuila",:country_id=>158)
State.create(:id=>2134, :name=>"Chihuahua",:country_id=>158)
State.create(:id=>2135, :name=>"Baja California Sur",:country_id=>158)
State.create(:id=>2136, :name=>"Baja California",:country_id=>158)
State.create(:id=>2137, :name=>"Aguascalientes",:country_id=>158)
State.create(:id=>2138, :name=>"Melaka",:country_id=>159)
State.create(:id=>2139, :name=>"Terengganu",:country_id=>159)
State.create(:id=>2140, :name=>"Selangor",:country_id=>159)
State.create(:id=>2141, :name=>"Sarawak",:country_id=>159)
State.create(:id=>2142, :name=>"Sabah",:country_id=>159)
State.create(:id=>2143, :name=>"Perlis",:country_id=>159)
State.create(:id=>2144, :name=>"Perak",:country_id=>159)
State.create(:id=>2145, :name=>"Pahang",:country_id=>159)
State.create(:id=>2146, :name=>"<NAME>",:country_id=>159)
State.create(:id=>2147, :name=>"Kelantan",:country_id=>159)
State.create(:id=>2148, :name=>"<NAME>",:country_id=>159)
State.create(:id=>2149, :name=>"<NAME>",:country_id=>159)
State.create(:id=>2150, :name=>"Kedah",:country_id=>159)
State.create(:id=>2151, :name=>"Johor",:country_id=>159)
State.create(:id=>2152, :name=>"Labuan",:country_id=>159)
State.create(:id=>2153, :name=>"Putrajaya",:country_id=>159)
State.create(:id=>2154, :name=>"Zambézia",:country_id=>160)
State.create(:id=>2155, :name=>"Tete",:country_id=>160)
State.create(:id=>2156, :name=>"Sofala",:country_id=>160)
State.create(:id=>2157, :name=>"Niassa",:country_id=>160)
State.create(:id=>2158, :name=>"Nampula",:country_id=>160)
State.create(:id=>2159, :name=>"Maputo",:country_id=>160)
State.create(:id=>2160, :name=>"Manica",:country_id=>160)
State.create(:id=>2161, :name=>"Inhambane",:country_id=>160)
State.create(:id=>2162, :name=>"Gaza",:country_id=>160)
State.create(:id=>2163, :name=>"<NAME>",:country_id=>160)
State.create(:id=>2164, :name=>"<NAME>",:country_id=>160)
State.create(:id=>2165, :name=>"Caprivi",:country_id=>161)
State.create(:id=>2166, :name=>"Khomas",:country_id=>161)
State.create(:id=>2167, :name=>"Erongo",:country_id=>161)
State.create(:id=>2168, :name=>"Hardap",:country_id=>161)
State.create(:id=>2169, :name=>"Karas",:country_id=>161)
State.create(:id=>2170, :name=>"Kunene",:country_id=>161)
State.create(:id=>2171, :name=>"Ohangwena",:country_id=>161)
State.create(:id=>2172, :name=>"Okavango",:country_id=>161)
State.create(:id=>2173, :name=>"Omaheke",:country_id=>161)
State.create(:id=>2174, :name=>"Omusati",:country_id=>161)
State.create(:id=>2175, :name=>"Oshana",:country_id=>161)
State.create(:id=>2176, :name=>"Oshikoto",:country_id=>161)
State.create(:id=>2177, :name=>"Otjozondjupa",:country_id=>161)
State.create(:id=>2178, :name=>"Province Sud",:country_id=>162)
State.create(:id=>2179, :name=>"Province Nord",:country_id=>162)
State.create(:id=>2180, :name=>"Province des îles Loyauté",:country_id=>162)
State.create(:id=>2181, :name=>"Zinder",:country_id=>163)
State.create(:id=>2182, :name=>"Tahoua",:country_id=>163)
State.create(:id=>2183, :name=>"Maradi",:country_id=>163)
State.create(:id=>2184, :name=>"Dosso",:country_id=>163)
State.create(:id=>2185, :name=>"Diffa",:country_id=>163)
State.create(:id=>2186, :name=>"Agadez",:country_id=>163)
State.create(:id=>2187, :name=>"Tillabéri",:country_id=>163)
State.create(:id=>2188, :name=>"Niamey",:country_id=>163)
State.create(:id=>2189, :name=>"Sokoto",:country_id=>165)
State.create(:id=>2190, :name=>"Rivers",:country_id=>165)
State.create(:id=>2191, :name=>"Plateau",:country_id=>165)
State.create(:id=>2192, :name=>"Oyo",:country_id=>165)
State.create(:id=>2193, :name=>"Ondo",:country_id=>165)
State.create(:id=>2194, :name=>"Ogun",:country_id=>165)
State.create(:id=>2195, :name=>"Niger",:country_id=>165)
State.create(:id=>2196, :name=>"Lagos",:country_id=>165)
State.create(:id=>2197, :name=>"Kwara",:country_id=>165)
State.create(:id=>2198, :name=>"Katsina",:country_id=>165)
State.create(:id=>2199, :name=>"Kano",:country_id=>165)
State.create(:id=>2200, :name=>"Kaduna",:country_id=>165)
State.create(:id=>2201, :name=>"Imo State",:country_id=>165)
State.create(:id=>2202, :name=>"Cross River",:country_id=>165)
State.create(:id=>2203, :name=>"Borno",:country_id=>165)
State.create(:id=>2204, :name=>"Benue State",:country_id=>165)
State.create(:id=>2205, :name=>"Bauchi State",:country_id=>165)
State.create(:id=>2206, :name=>"Anambra State",:country_id=>165)
State.create(:id=>2207, :name=>"Akwa Ibom",:country_id=>165)
State.create(:id=>2208, :name=>"Abuja Federal Capital Territory",:country_id=>165)
State.create(:id=>2209, :name=>"Abia",:country_id=>165)
State.create(:id=>2210, :name=>"Delta State",:country_id=>165)
State.create(:id=>2211, :name=>"Adamawa State",:country_id=>165)
State.create(:id=>2212, :name=>"Edo",:country_id=>165)
State.create(:id=>2213, :name=>"Enugu State",:country_id=>165)
State.create(:id=>2214, :name=>"Jigawa State",:country_id=>165)
State.create(:id=>2215, :name=>"Bayelsa",:country_id=>165)
State.create(:id=>2216, :name=>"Ebonyi",:country_id=>165)
State.create(:id=>2217, :name=>"Ekiti",:country_id=>165)
State.create(:id=>2218, :name=>"Gombe",:country_id=>165)
State.create(:id=>2219, :name=>"Nassarawa",:country_id=>165)
State.create(:id=>2220, :name=>"Zamfara",:country_id=>165)
State.create(:id=>2221, :name=>"Kebbi",:country_id=>165)
State.create(:id=>2222, :name=>"Kogi",:country_id=>165)
State.create(:id=>2223, :name=>"Osun",:country_id=>165)
State.create(:id=>2224, :name=>"<NAME>",:country_id=>165)
State.create(:id=>2225, :name=>"Yobe",:country_id=>165)
State.create(:id=>2226, :name=>"Rivas",:country_id=>166)
State.create(:id=>2227, :name=>"<NAME>",:country_id=>166)
State.create(:id=>2228, :name=>"<NAME>",:country_id=>166)
State.create(:id=>2229, :name=>"Matagalpa",:country_id=>166)
State.create(:id=>2230, :name=>"Masaya",:country_id=>166)
State.create(:id=>2231, :name=>"Managua",:country_id=>166)
State.create(:id=>2232, :name=>"Madriz",:country_id=>166)
State.create(:id=>2233, :name=>"León",:country_id=>166)
State.create(:id=>2234, :name=>"Jinotega",:country_id=>166)
State.create(:id=>2235, :name=>"Granada",:country_id=>166)
State.create(:id=>2236, :name=>"Estelí",:country_id=>166)
State.create(:id=>2237, :name=>"Chontales",:country_id=>166)
State.create(:id=>2238, :name=>"Chinandega",:country_id=>166)
State.create(:id=>2239, :name=>"Carazo",:country_id=>166)
State.create(:id=>2240, :name=>"Boaco",:country_id=>166)
State.create(:id=>2241, :name=>"Atlántico Norte",:country_id=>166)
State.create(:id=>2242, :name=>"Atlántico Sur",:country_id=>166)
State.create(:id=>2243, :name=>"South Holland",:country_id=>167)
State.create(:id=>2244, :name=>"Zeeland",:country_id=>167)
State.create(:id=>2245, :name=>"Utrecht",:country_id=>167)
State.create(:id=>2246, :name=>"Overijssel",:country_id=>167)
State.create(:id=>2247, :name=>"North Holland",:country_id=>167)
State.create(:id=>2248, :name=>"North Brabant",:country_id=>167)
State.create(:id=>2249, :name=>"Limburg",:country_id=>167)
State.create(:id=>2250, :name=>"Groningen",:country_id=>167)
State.create(:id=>2251, :name=>"Gelderland",:country_id=>167)
State.create(:id=>2252, :name=>"Friesland",:country_id=>167)
State.create(:id=>2253, :name=>"Drenthe",:country_id=>167)
State.create(:id=>2254, :name=>"Flevoland",:country_id=>167)
State.create(:id=>2255, :name=>"Finnmark",:country_id=>168)
State.create(:id=>2256, :name=>"Vestfold",:country_id=>168)
State.create(:id=>2257, :name=>"Vest-Agder",:country_id=>168)
State.create(:id=>2258, :name=>"Troms",:country_id=>168)
State.create(:id=>2259, :name=>"Telemark",:country_id=>168)
State.create(:id=>2260, :name=>"Sør-Trøndelag",:country_id=>168)
State.create(:id=>2261, :name=>"<NAME>",:country_id=>168)
State.create(:id=>2262, :name=>"Rogaland",:country_id=>168)
State.create(:id=>2263, :name=>"Østfold",:country_id=>168)
State.create(:id=>2264, :name=>"Oslo",:country_id=>168)
State.create(:id=>2265, :name=>"Oppland",:country_id=>168)
State.create(:id=>2266, :name=>"Nord-Trøndelag",:country_id=>168)
State.create(:id=>2267, :name=>"Nordland",:country_id=>168)
State.create(:id=>2268, :name=>"<NAME>",:country_id=>168)
State.create(:id=>2269, :name=>"Hordaland",:country_id=>168)
State.create(:id=>2270, :name=>"Hedmark",:country_id=>168)
State.create(:id=>2271, :name=>"Buskerud",:country_id=>168)
State.create(:id=>2272, :name=>"Aust-Agder",:country_id=>168)
State.create(:id=>2273, :name=>"Akershus",:country_id=>168)
State.create(:id=>2274, :name=>"Far Western Region",:country_id=>169)
State.create(:id=>2275, :name=>"Mid Western Region",:country_id=>169)
State.create(:id=>2276, :name=>"Central Region",:country_id=>169)
State.create(:id=>2277, :name=>"Eastern Region",:country_id=>169)
State.create(:id=>2278, :name=>"Western Region",:country_id=>169)
State.create(:id=>2279, :name=>"Yaren",:country_id=>170)
State.create(:id=>2280, :name=>"Uaboe",:country_id=>170)
State.create(:id=>2281, :name=>"Nibok",:country_id=>170)
State.create(:id=>2282, :name=>"Meneng",:country_id=>170)
State.create(:id=>2283, :name=>"Ijuw",:country_id=>170)
State.create(:id=>2284, :name=>"Ewa",:country_id=>170)
State.create(:id=>2285, :name=>"Denigomodu",:country_id=>170)
State.create(:id=>2286, :name=>"Buada",:country_id=>170)
State.create(:id=>2287, :name=>"Boe",:country_id=>170)
State.create(:id=>2288, :name=>"Baiti",:country_id=>170)
State.create(:id=>2289, :name=>"Anibare",:country_id=>170)
State.create(:id=>2290, :name=>"Anetan",:country_id=>170)
State.create(:id=>2291, :name=>"Anabar",:country_id=>170)
State.create(:id=>2292, :name=>"Aiwo",:country_id=>170)
State.create(:id=>2293, :name=>"Wellington",:country_id=>172)
State.create(:id=>2294, :name=>"Manawatu-Wanganui",:country_id=>172)
State.create(:id=>2295, :name=>"Waikato",:country_id=>172)
State.create(:id=>2296, :name=>"Tasman",:country_id=>172)
State.create(:id=>2297, :name=>"Taranaki",:country_id=>172)
State.create(:id=>2298, :name=>"Southland",:country_id=>172)
State.create(:id=>2299, :name=>"Bay of Plenty",:country_id=>172)
State.create(:id=>2300, :name=>"Northland",:country_id=>172)
State.create(:id=>2301, :name=>"Marlborough",:country_id=>172)
State.create(:id=>2302, :name=>"Hawke's Bay",:country_id=>172)
State.create(:id=>2303, :name=>"Gisborne",:country_id=>172)
State.create(:id=>2304, :name=>"Canterbury",:country_id=>172)
State.create(:id=>2305, :name=>"Auckland",:country_id=>172)
State.create(:id=>2306, :name=>"Chatham Islands",:country_id=>172)
State.create(:id=>2307, :name=>"Nelson",:country_id=>172)
State.create(:id=>2308, :name=>"Otago",:country_id=>172)
State.create(:id=>2309, :name=>"West Coast",:country_id=>172)
State.create(:id=>2310, :name=>"Ad Dākhilīyah",:country_id=>173)
State.create(:id=>2311, :name=>"Al Bāţinah",:country_id=>173)
State.create(:id=>2312, :name=>"Al Wusţá",:country_id=>173)
State.create(:id=>2313, :name=>"<NAME>",:country_id=>173)
State.create(:id=>2314, :name=>"Az̧ Z̧āhirah",:country_id=>173)
State.create(:id=>2315, :name=>"Masqaţ",:country_id=>173)
State.create(:id=>2316, :name=>"Muḩāfaz̧at Musandam",:country_id=>173)
State.create(:id=>2317, :name=>"Z̧ufār",:country_id=>173)
State.create(:id=>2318, :name=>"Muḩāfaz̧at al Buraymī",:country_id=>173)
State.create(:id=>2319, :name=>"Veraguas",:country_id=>174)
State.create(:id=>2320, :name=>"Kuna Yala",:country_id=>174)
State.create(:id=>2321, :name=>"Panamá",:country_id=>174)
State.create(:id=>2322, :name=>"Los Santos",:country_id=>174)
State.create(:id=>2323, :name=>"Herrera",:country_id=>174)
State.create(:id=>2324, :name=>"Darién",:country_id=>174)
State.create(:id=>2325, :name=>"Colón",:country_id=>174)
State.create(:id=>2326, :name=>"Coclé",:country_id=>174)
State.create(:id=>2327, :name=>"Chiriquí",:country_id=>174)
State.create(:id=>2328, :name=>"Bocas del Toro",:country_id=>174)
State.create(:id=>2329, :name=>"Emberá",:country_id=>174)
State.create(:id=>2330, :name=>"Ngöbe-Buglé",:country_id=>174)
State.create(:id=>2331, :name=>"Ucayali",:country_id=>175)
State.create(:id=>2332, :name=>"Tumbes",:country_id=>175)
State.create(:id=>2333, :name=>"<NAME>",:country_id=>175)
State.create(:id=>2334, :name=>"Piura",:country_id=>175)
State.create(:id=>2335, :name=>"Loreto",:country_id=>175)
State.create(:id=>2336, :name=>"Lambayeque",:country_id=>175)
State.create(:id=>2337, :name=>"La Libertad",:country_id=>175)
State.create(:id=>2338, :name=>"Huanuco",:country_id=>175)
State.create(:id=>2339, :name=>"Cajamarca",:country_id=>175)
State.create(:id=>2340, :name=>"Ancash",:country_id=>175)
State.create(:id=>2341, :name=>"Amazonas",:country_id=>175)
State.create(:id=>2342, :name=>"Tacna",:country_id=>175)
State.create(:id=>2343, :name=>"Puno",:country_id=>175)
State.create(:id=>2344, :name=>"Pasco",:country_id=>175)
State.create(:id=>2345, :name=>"Moquegua",:country_id=>175)
State.create(:id=>2346, :name=>"<NAME>",:country_id=>175)
State.create(:id=>2347, :name=>"Provincia de Lima",:country_id=>175)
State.create(:id=>2348, :name=>"Lima",:country_id=>175)
State.create(:id=>2349, :name=>"Junín",:country_id=>175)
State.create(:id=>2350, :name=>"Ica",:country_id=>175)
State.create(:id=>2351, :name=>"Huancavelica",:country_id=>175)
State.create(:id=>2352, :name=>"Cusco",:country_id=>175)
State.create(:id=>2353, :name=>"Callao",:country_id=>175)
State.create(:id=>2354, :name=>"Ayacucho",:country_id=>175)
State.create(:id=>2355, :name=>"Arequipa",:country_id=>175)
State.create(:id=>2356, :name=>"Apurímac",:country_id=>175)
# Seed data: State rows (ids 2357..2947), each bound to a parent Country via
# :country_id. Explicit :id values keep the dataset stable across reseeds, so
# this file must be loaded into an empty/truncated states table (or with
# identity-insert allowed) to avoid primary-key conflicts.
#
# NOTE(review): country names in the section comments below are inferred from
# the member state/region names — confirm against the Country seed data.
# NOTE(review): several :name values are literal "<NAME>" placeholders —
# looks like upstream data was scrubbed/anonymized. TODO: restore the real
# names from a canonical source before using this seed in production.
# NOTE(review): country_id jumps from 181 to 183 here; id 182 is presumably
# seeded elsewhere — verify.

# country_id 176 — presumably French Polynesia
State.create(:id=>2357, :name=>"Îles Marquises",:country_id=>176)
State.create(:id=>2358, :name=>"Îles Tuamotu-Gambier",:country_id=>176)
State.create(:id=>2359, :name=>"Îles Sous-le-Vent",:country_id=>176)
State.create(:id=>2360, :name=>"Îles du Vent",:country_id=>176)
State.create(:id=>2361, :name=>"Îles Australes",:country_id=>176)
# country_id 177 — presumably Papua New Guinea
State.create(:id=>2362, :name=>"West New Britain",:country_id=>177)
State.create(:id=>2363, :name=>"Western Province",:country_id=>177)
State.create(:id=>2364, :name=>"Western Highlands",:country_id=>177)
State.create(:id=>2365, :name=>"Southern Highlands",:country_id=>177)
State.create(:id=>2366, :name=>"Sandaun",:country_id=>177)
State.create(:id=>2367, :name=>"Bougainville",:country_id=>177)
State.create(:id=>2368, :name=>"Northern Province",:country_id=>177)
State.create(:id=>2369, :name=>"New Ireland",:country_id=>177)
State.create(:id=>2370, :name=>"National Capital District",:country_id=>177)
State.create(:id=>2371, :name=>"Morobe",:country_id=>177)
State.create(:id=>2372, :name=>"Manus",:country_id=>177)
State.create(:id=>2373, :name=>"Madang",:country_id=>177)
State.create(:id=>2374, :name=>"Gulf",:country_id=>177)
State.create(:id=>2375, :name=>"Enga",:country_id=>177)
State.create(:id=>2376, :name=>"East Sepik",:country_id=>177)
State.create(:id=>2377, :name=>"East New Britain",:country_id=>177)
State.create(:id=>2378, :name=>"Eastern Highlands",:country_id=>177)
State.create(:id=>2379, :name=>"Chimbu",:country_id=>177)
State.create(:id=>2380, :name=>"Milne Bay",:country_id=>177)
State.create(:id=>2381, :name=>"Central Province",:country_id=>177)
# country_id 178 — presumably Philippines
State.create(:id=>2382, :name=>"Autonomous Region in Muslim Mindanao",:country_id=>178)
State.create(:id=>2383, :name=>"Northern Mindanao",:country_id=>178)
State.create(:id=>2384, :name=>"Mimaropa",:country_id=>178)
State.create(:id=>2385, :name=>"Cagayan Valley",:country_id=>178)
State.create(:id=>2386, :name=>"Soccsksargen",:country_id=>178)
State.create(:id=>2387, :name=>"Caraga",:country_id=>178)
State.create(:id=>2388, :name=>"Cordillera Administrative Region",:country_id=>178)
State.create(:id=>2389, :name=>"Ilocos",:country_id=>178)
State.create(:id=>2390, :name=>"Calabarzon",:country_id=>178)
State.create(:id=>2391, :name=>"Western Visayas",:country_id=>178)
State.create(:id=>2392, :name=>"Central Luzon",:country_id=>178)
State.create(:id=>2393, :name=>"Central Visayas",:country_id=>178)
State.create(:id=>2394, :name=>"Eastern Visayas",:country_id=>178)
State.create(:id=>2395, :name=>"Zamboanga Peninsula",:country_id=>178)
State.create(:id=>2396, :name=>"Davao",:country_id=>178)
State.create(:id=>2397, :name=>"Bicol",:country_id=>178)
State.create(:id=>2398, :name=>"National Capital Region",:country_id=>178)
# country_id 179 — presumably Pakistan
State.create(:id=>2399, :name=>"Islāmābād",:country_id=>179)
State.create(:id=>2400, :name=>"Sindh",:country_id=>179)
State.create(:id=>2401, :name=>"Punjab",:country_id=>179)
State.create(:id=>2402, :name=>"North West Frontier Province",:country_id=>179)
State.create(:id=>2403, :name=>"Gilgit-Baltistan",:country_id=>179)
State.create(:id=>2404, :name=>"Federally Administered Tribal Areas",:country_id=>179)
State.create(:id=>2405, :name=>"Balochistān",:country_id=>179)
State.create(:id=>2406, :name=>"<NAME>",:country_id=>179)
# country_id 180 — presumably Poland
State.create(:id=>2407, :name=>"Lublin Voivodeship",:country_id=>180)
State.create(:id=>2408, :name=>"Lesser Poland Voivodeship",:country_id=>180)
State.create(:id=>2409, :name=>"Masovian Voivodeship",:country_id=>180)
State.create(:id=>2410, :name=>"Subcarpathian Voivodeship",:country_id=>180)
State.create(:id=>2411, :name=>"Podlasie",:country_id=>180)
State.create(:id=>2412, :name=>"Świętokrzyskie",:country_id=>180)
State.create(:id=>2413, :name=>"Warmian-Masurian Voivodeship",:country_id=>180)
State.create(:id=>2414, :name=>"Lower Silesian Voivodeship",:country_id=>180)
State.create(:id=>2415, :name=>"Łódź Voivodeship",:country_id=>180)
State.create(:id=>2416, :name=>"Lubusz",:country_id=>180)
State.create(:id=>2417, :name=>"Opole Voivodeship",:country_id=>180)
State.create(:id=>2418, :name=>"Pomeranian Voivodeship",:country_id=>180)
State.create(:id=>2419, :name=>"Silesian Voivodeship",:country_id=>180)
State.create(:id=>2420, :name=>"Greater Poland Voivodeship",:country_id=>180)
State.create(:id=>2421, :name=>"West Pomeranian Voivodeship",:country_id=>180)
State.create(:id=>2422, :name=>"Kujawsko-Pomorskie",:country_id=>180)
# country_id 181 — presumably Saint Pierre and Miquelon
State.create(:id=>2423, :name=>"Saint-Pierre",:country_id=>181)
State.create(:id=>2424, :name=>"Miquelon-Langlade",:country_id=>181)
# country_id 183 — presumably Puerto Rico (municipalities)
State.create(:id=>2425, :name=>"Adjuntas",:country_id=>183)
State.create(:id=>2426, :name=>"Aguada",:country_id=>183)
State.create(:id=>2427, :name=>"Aguadilla",:country_id=>183)
State.create(:id=>2428, :name=>"Aguas Buenas",:country_id=>183)
State.create(:id=>2429, :name=>"Aibonito",:country_id=>183)
State.create(:id=>2430, :name=>"Añasco",:country_id=>183)
State.create(:id=>2431, :name=>"Arecibo",:country_id=>183)
State.create(:id=>2432, :name=>"Arroyo",:country_id=>183)
State.create(:id=>2433, :name=>"Barceloneta",:country_id=>183)
State.create(:id=>2434, :name=>"Barranquitas",:country_id=>183)
State.create(:id=>2435, :name=>"Bayamón",:country_id=>183)
State.create(:id=>2436, :name=>"<NAME>",:country_id=>183)
State.create(:id=>2437, :name=>"Caguas",:country_id=>183)
State.create(:id=>2438, :name=>"Camuy",:country_id=>183)
State.create(:id=>2439, :name=>"Canovanas",:country_id=>183)
State.create(:id=>2440, :name=>"Carolina",:country_id=>183)
State.create(:id=>2441, :name=>"Catano",:country_id=>183)
State.create(:id=>2442, :name=>"Cayey",:country_id=>183)
State.create(:id=>2443, :name=>"Ceiba",:country_id=>183)
State.create(:id=>2444, :name=>"Ciales",:country_id=>183)
State.create(:id=>2445, :name=>"Cidra",:country_id=>183)
State.create(:id=>2446, :name=>"Coamo",:country_id=>183)
State.create(:id=>2447, :name=>"Comerio",:country_id=>183)
State.create(:id=>2448, :name=>"Corozal",:country_id=>183)
State.create(:id=>2449, :name=>"Culebra",:country_id=>183)
State.create(:id=>2450, :name=>"Dorado",:country_id=>183)
State.create(:id=>2451, :name=>"Fajardo",:country_id=>183)
State.create(:id=>2452, :name=>"Florida",:country_id=>183)
State.create(:id=>2453, :name=>"Guanica",:country_id=>183)
State.create(:id=>2454, :name=>"Guayama",:country_id=>183)
State.create(:id=>2455, :name=>"Guayanilla",:country_id=>183)
State.create(:id=>2456, :name=>"Guaynabo",:country_id=>183)
State.create(:id=>2457, :name=>"Gurabo",:country_id=>183)
State.create(:id=>2458, :name=>"Hatillo",:country_id=>183)
State.create(:id=>2459, :name=>"Hormigueros",:country_id=>183)
State.create(:id=>2460, :name=>"Humacao",:country_id=>183)
State.create(:id=>2461, :name=>"Isabela",:country_id=>183)
State.create(:id=>2462, :name=>"Municipio de Jayuya",:country_id=>183)
State.create(:id=>2463, :name=>"<NAME>",:country_id=>183)
State.create(:id=>2464, :name=>"Municipio de Juncos",:country_id=>183)
State.create(:id=>2465, :name=>"Lajas",:country_id=>183)
State.create(:id=>2466, :name=>"Lares",:country_id=>183)
State.create(:id=>2467, :name=>"Las Marias",:country_id=>183)
State.create(:id=>2468, :name=>"<NAME>",:country_id=>183)
State.create(:id=>2469, :name=>"Loiza",:country_id=>183)
State.create(:id=>2470, :name=>"Luquillo",:country_id=>183)
State.create(:id=>2471, :name=>"Manati",:country_id=>183)
State.create(:id=>2472, :name=>"Maricao",:country_id=>183)
State.create(:id=>2473, :name=>"Maunabo",:country_id=>183)
State.create(:id=>2474, :name=>"Mayaguez",:country_id=>183)
State.create(:id=>2475, :name=>"Moca",:country_id=>183)
State.create(:id=>2476, :name=>"Morovis",:country_id=>183)
State.create(:id=>2477, :name=>"Naguabo",:country_id=>183)
State.create(:id=>2478, :name=>"Naranjito",:country_id=>183)
State.create(:id=>2479, :name=>"Orocovis",:country_id=>183)
State.create(:id=>2480, :name=>"Patillas",:country_id=>183)
State.create(:id=>2481, :name=>"Penuelas",:country_id=>183)
State.create(:id=>2482, :name=>"Ponce",:country_id=>183)
State.create(:id=>2483, :name=>"Rincon",:country_id=>183)
State.create(:id=>2484, :name=>"Quebradillas",:country_id=>183)
State.create(:id=>2485, :name=>"<NAME>",:country_id=>183)
State.create(:id=>2486, :name=>"<NAME>",:country_id=>183)
State.create(:id=>2487, :name=>"Salinas",:country_id=>183)
State.create(:id=>2488, :name=>"<NAME>",:country_id=>183)
State.create(:id=>2489, :name=>"<NAME>",:country_id=>183)
State.create(:id=>2490, :name=>"<NAME>",:country_id=>183)
State.create(:id=>2491, :name=>"<NAME>",:country_id=>183)
State.create(:id=>2492, :name=>"<NAME>",:country_id=>183)
State.create(:id=>2493, :name=>"<NAME>",:country_id=>183)
State.create(:id=>2494, :name=>"<NAME>",:country_id=>183)
State.create(:id=>2495, :name=>"<NAME>",:country_id=>183)
State.create(:id=>2496, :name=>"Utuado",:country_id=>183)
State.create(:id=>2497, :name=>"<NAME>",:country_id=>183)
State.create(:id=>2498, :name=>"<NAME>",:country_id=>183)
State.create(:id=>2499, :name=>"Villalba",:country_id=>183)
State.create(:id=>2500, :name=>"Yabucoa",:country_id=>183)
State.create(:id=>2501, :name=>"Yauco",:country_id=>183)
State.create(:id=>2502, :name=>"Vieques",:country_id=>183)
# country_id 184 — presumably Palestinian Territory
State.create(:id=>2503, :name=>"<NAME>",:country_id=>184)
State.create(:id=>2504, :name=>"West Bank",:country_id=>184)
# country_id 185 — presumably Portugal
State.create(:id=>2505, :name=>"Setúbal",:country_id=>185)
State.create(:id=>2506, :name=>"Santarém",:country_id=>185)
State.create(:id=>2507, :name=>"Portalegre",:country_id=>185)
State.create(:id=>2508, :name=>"Lisbon",:country_id=>185)
State.create(:id=>2509, :name=>"Leiria",:country_id=>185)
State.create(:id=>2510, :name=>"Faro",:country_id=>185)
State.create(:id=>2511, :name=>"Évora",:country_id=>185)
State.create(:id=>2512, :name=>"<NAME>",:country_id=>185)
State.create(:id=>2513, :name=>"Beja",:country_id=>185)
State.create(:id=>2514, :name=>"Madeira",:country_id=>185)
State.create(:id=>2515, :name=>"Viseu",:country_id=>185)
State.create(:id=>2516, :name=>"<NAME>",:country_id=>185)
State.create(:id=>2517, :name=>"<NAME>",:country_id=>185)
State.create(:id=>2518, :name=>"Porto",:country_id=>185)
State.create(:id=>2519, :name=>"Guarda",:country_id=>185)
State.create(:id=>2520, :name=>"Coimbra",:country_id=>185)
State.create(:id=>2521, :name=>"Bragança",:country_id=>185)
State.create(:id=>2522, :name=>"Braga",:country_id=>185)
State.create(:id=>2523, :name=>"Aveiro",:country_id=>185)
State.create(:id=>2524, :name=>"Azores",:country_id=>185)
# country_id 186 — presumably Palau
State.create(:id=>2525, :name=>"Ngatpang",:country_id=>186)
State.create(:id=>2526, :name=>"Sonsorol",:country_id=>186)
State.create(:id=>2527, :name=>"Kayangel",:country_id=>186)
State.create(:id=>2528, :name=>"State of Hatohobei",:country_id=>186)
State.create(:id=>2529, :name=>"Aimeliik",:country_id=>186)
State.create(:id=>2530, :name=>"Airai",:country_id=>186)
State.create(:id=>2531, :name=>"Angaur",:country_id=>186)
State.create(:id=>2532, :name=>"Koror",:country_id=>186)
State.create(:id=>2533, :name=>"Melekeok",:country_id=>186)
State.create(:id=>2534, :name=>"Ngaraard",:country_id=>186)
State.create(:id=>2535, :name=>"Ngchesar",:country_id=>186)
State.create(:id=>2536, :name=>"Ngarchelong",:country_id=>186)
State.create(:id=>2537, :name=>"Ngardmau",:country_id=>186)
State.create(:id=>2538, :name=>"State of Ngeremlengui",:country_id=>186)
State.create(:id=>2539, :name=>"Ngiwal",:country_id=>186)
State.create(:id=>2540, :name=>"Peleliu",:country_id=>186)
# country_id 187 — presumably Paraguay
State.create(:id=>2541, :name=>"<NAME>",:country_id=>187)
State.create(:id=>2542, :name=>"<NAME>",:country_id=>187)
State.create(:id=>2543, :name=>"Paraguarí",:country_id=>187)
State.create(:id=>2544, :name=>"Ñeembucú",:country_id=>187)
State.create(:id=>2545, :name=>"Misiones",:country_id=>187)
State.create(:id=>2546, :name=>"Itapúa",:country_id=>187)
State.create(:id=>2547, :name=>"Guairá",:country_id=>187)
State.create(:id=>2548, :name=>"Cordillera",:country_id=>187)
State.create(:id=>2549, :name=>"Concepción",:country_id=>187)
State.create(:id=>2550, :name=>"Central",:country_id=>187)
State.create(:id=>2551, :name=>"Canindeyú",:country_id=>187)
State.create(:id=>2552, :name=>"Caazapá",:country_id=>187)
State.create(:id=>2553, :name=>"Caaguazú",:country_id=>187)
State.create(:id=>2554, :name=>"Amambay",:country_id=>187)
State.create(:id=>2555, :name=>"Alto Paraná",:country_id=>187)
State.create(:id=>2556, :name=>"Alto Paraguay",:country_id=>187)
State.create(:id=>2557, :name=>"Asunción",:country_id=>187)
State.create(:id=>2558, :name=>"Boquerón",:country_id=>187)
# country_id 188 — presumably Qatar
State.create(:id=>2559, :name=>"<NAME>",:country_id=>188)
State.create(:id=>2560, :name=>"<NAME>",:country_id=>188)
State.create(:id=>2561, :name=>"Al Khawr",:country_id=>188)
State.create(:id=>2562, :name=>"Umm Şalāl",:country_id=>188)
State.create(:id=>2563, :name=>"Al Jumaylīyah",:country_id=>188)
State.create(:id=>2564, :name=>"<NAME>",:country_id=>188)
State.create(:id=>2565, :name=>"<NAME>",:country_id=>188)
State.create(:id=>2566, :name=>"<NAME>akrah",:country_id=>188)
State.create(:id=>2567, :name=>"<NAME>",:country_id=>188)
State.create(:id=>2568, :name=>"<NAME> Sa‘īd",:country_id=>188)
# country_id 189 — presumably Réunion
State.create(:id=>2569, :name=>"Réunion",:country_id=>189)
# country_id 190 — presumably Romania
State.create(:id=>2570, :name=>"<NAME>",:country_id=>190)
State.create(:id=>2571, :name=>"Vâlcea",:country_id=>190)
State.create(:id=>2572, :name=>"Vaslui",:country_id=>190)
State.create(:id=>2573, :name=>"Tulcea",:country_id=>190)
State.create(:id=>2574, :name=>"Timiş",:country_id=>190)
State.create(:id=>2575, :name=>"Teleorman",:country_id=>190)
State.create(:id=>2576, :name=>"Suceava",:country_id=>190)
State.create(:id=>2577, :name=>"Sibiu",:country_id=>190)
State.create(:id=>2578, :name=>"<NAME>",:country_id=>190)
State.create(:id=>2579, :name=>"Sălaj",:country_id=>190)
State.create(:id=>2580, :name=>"Prahova",:country_id=>190)
State.create(:id=>2581, :name=>"Olt",:country_id=>190)
State.create(:id=>2582, :name=>"Neamţ",:country_id=>190)
State.create(:id=>2583, :name=>"Mureş",:country_id=>190)
State.create(:id=>2584, :name=>"Mehedinţi",:country_id=>190)
State.create(:id=>2585, :name=>"Maramureş",:country_id=>190)
State.create(:id=>2586, :name=>"Iaşi",:country_id=>190)
State.create(:id=>2587, :name=>"Ialomiţa",:country_id=>190)
State.create(:id=>2588, :name=>"Hunedoara",:country_id=>190)
State.create(:id=>2589, :name=>"Harghita",:country_id=>190)
State.create(:id=>2590, :name=>"Gorj",:country_id=>190)
State.create(:id=>2591, :name=>"Giurgiu",:country_id=>190)
State.create(:id=>2592, :name=>"Galaţi",:country_id=>190)
State.create(:id=>2593, :name=>"Dolj",:country_id=>190)
State.create(:id=>2594, :name=>"Dâmboviţa",:country_id=>190)
State.create(:id=>2595, :name=>"Covasna",:country_id=>190)
State.create(:id=>2596, :name=>"Constanţa",:country_id=>190)
State.create(:id=>2597, :name=>"Cluj",:country_id=>190)
State.create(:id=>2598, :name=>"Caraş-Severin",:country_id=>190)
State.create(:id=>2599, :name=>"Călăraşi",:country_id=>190)
State.create(:id=>2600, :name=>"Buzău",:country_id=>190)
State.create(:id=>2601, :name=>"Bucureşti",:country_id=>190)
State.create(:id=>2602, :name=>"Braşov",:country_id=>190)
State.create(:id=>2603, :name=>"Brăila",:country_id=>190)
State.create(:id=>2604, :name=>"Botoşani",:country_id=>190)
State.create(:id=>2605, :name=>"Bistriţa-Năsăud",:country_id=>190)
State.create(:id=>2606, :name=>"Bihor",:country_id=>190)
State.create(:id=>2607, :name=>"Bacău",:country_id=>190)
State.create(:id=>2608, :name=>"Argeş",:country_id=>190)
State.create(:id=>2609, :name=>"Arad",:country_id=>190)
State.create(:id=>2610, :name=>"Alba",:country_id=>190)
State.create(:id=>2611, :name=>"Ilfov",:country_id=>190)
# country_id 191 — presumably Serbia
State.create(:id=>2612, :name=>"<NAME>",:country_id=>191)
State.create(:id=>2613, :name=>"Central Serbia",:country_id=>191)
# country_id 192 — presumably Russia
State.create(:id=>2614, :name=>"Jaroslavl",:country_id=>192)
State.create(:id=>2615, :name=>"Voronezj",:country_id=>192)
State.create(:id=>2616, :name=>"Vologda",:country_id=>192)
State.create(:id=>2617, :name=>"Volgograd",:country_id=>192)
State.create(:id=>2618, :name=>"Uljanovsk",:country_id=>192)
State.create(:id=>2619, :name=>"Udmurtiya",:country_id=>192)
State.create(:id=>2620, :name=>"Tverskaya",:country_id=>192)
State.create(:id=>2621, :name=>"Tula",:country_id=>192)
State.create(:id=>2622, :name=>"Tatarstan",:country_id=>192)
State.create(:id=>2623, :name=>"Tambov",:country_id=>192)
State.create(:id=>2624, :name=>"Stavropol'skiy",:country_id=>192)
State.create(:id=>2625, :name=>"Smolensk",:country_id=>192)
State.create(:id=>2626, :name=>"Saratov",:country_id=>192)
State.create(:id=>2627, :name=>"Samara",:country_id=>192)
State.create(:id=>2628, :name=>"Rjazan",:country_id=>192)
State.create(:id=>2629, :name=>"Rostov",:country_id=>192)
State.create(:id=>2630, :name=>"Pskov",:country_id=>192)
State.create(:id=>2631, :name=>"Perm",:country_id=>192)
State.create(:id=>2632, :name=>"Penza",:country_id=>192)
State.create(:id=>2633, :name=>"Orjol",:country_id=>192)
State.create(:id=>2634, :name=>"Orenburg",:country_id=>192)
State.create(:id=>2635, :name=>"Novgorod",:country_id=>192)
State.create(:id=>2636, :name=>"North Ossetia",:country_id=>192)
State.create(:id=>2637, :name=>"<NAME>",:country_id=>192)
State.create(:id=>2638, :name=>"Murmansk",:country_id=>192)
State.create(:id=>2639, :name=>"Moscow",:country_id=>192)
State.create(:id=>2640, :name=>"Moskovskaya",:country_id=>192)
State.create(:id=>2641, :name=>"Mordoviya",:country_id=>192)
State.create(:id=>2642, :name=>"Mariy-El",:country_id=>192)
State.create(:id=>2643, :name=>"Lipetsk",:country_id=>192)
State.create(:id=>2644, :name=>"Leningrad",:country_id=>192)
State.create(:id=>2645, :name=>"Sankt-Peterburg",:country_id=>192)
State.create(:id=>2646, :name=>"Kursk",:country_id=>192)
State.create(:id=>2647, :name=>"Krasnodarskiy",:country_id=>192)
State.create(:id=>2648, :name=>"Kostroma",:country_id=>192)
State.create(:id=>2649, :name=>"Komi",:country_id=>192)
State.create(:id=>2650, :name=>"Kirov",:country_id=>192)
State.create(:id=>2651, :name=>"Kareliya",:country_id=>192)
State.create(:id=>2652, :name=>"Karachayevo-Cherkesiya",:country_id=>192)
State.create(:id=>2653, :name=>"Kaluga",:country_id=>192)
State.create(:id=>2654, :name=>"Kalmykiya",:country_id=>192)
State.create(:id=>2655, :name=>"Kaliningrad",:country_id=>192)
State.create(:id=>2656, :name=>"Kabardino-Balkariya",:country_id=>192)
State.create(:id=>2657, :name=>"Ivanovo",:country_id=>192)
State.create(:id=>2658, :name=>"Ingushetiya",:country_id=>192)
State.create(:id=>2659, :name=>"<NAME>",:country_id=>192)
State.create(:id=>2660, :name=>"Dagestan",:country_id=>192)
State.create(:id=>2661, :name=>"Chuvashia",:country_id=>192)
State.create(:id=>2662, :name=>"Chechnya",:country_id=>192)
State.create(:id=>2663, :name=>"Brjansk",:country_id=>192)
State.create(:id=>2664, :name=>"Belgorod",:country_id=>192)
State.create(:id=>2665, :name=>"Bashkortostan",:country_id=>192)
State.create(:id=>2666, :name=>"Astrakhan",:country_id=>192)
State.create(:id=>2667, :name=>"Arkhangelskaya",:country_id=>192)
State.create(:id=>2668, :name=>"Adygeya",:country_id=>192)
State.create(:id=>2669, :name=>"Vladimir",:country_id=>192)
State.create(:id=>2670, :name=>"Yamalo-<NAME>",:country_id=>192)
State.create(:id=>2671, :name=>"Tjumen",:country_id=>192)
State.create(:id=>2672, :name=>"Tyva",:country_id=>192)
State.create(:id=>2673, :name=>"Tomsk",:country_id=>192)
State.create(:id=>2674, :name=>"Sverdlovsk",:country_id=>192)
State.create(:id=>2675, :name=>"Omsk",:country_id=>192)
State.create(:id=>2676, :name=>"Novosibirsk",:country_id=>192)
State.create(:id=>2677, :name=>"Kurgan",:country_id=>192)
State.create(:id=>2678, :name=>"Krasnoyarskiy",:country_id=>192)
State.create(:id=>2679, :name=>"Khanty-Mansiyskiy Avtonomnyy Okrug",:country_id=>192)
State.create(:id=>2680, :name=>"Khakasiya",:country_id=>192)
State.create(:id=>2681, :name=>"Kemerovo",:country_id=>192)
State.create(:id=>2682, :name=>"Altay",:country_id=>192)
State.create(:id=>2683, :name=>"Tsjeljabinsk",:country_id=>192)
State.create(:id=>2684, :name=>"Altayskiy",:country_id=>192)
State.create(:id=>2685, :name=>"Sakha",:country_id=>192)
State.create(:id=>2686, :name=>"Primorskiy",:country_id=>192)
State.create(:id=>2687, :name=>"<NAME>",:country_id=>192)
State.create(:id=>2688, :name=>"Irkutsk",:country_id=>192)
State.create(:id=>2689, :name=>"Zabaïkalski",:country_id=>192)
State.create(:id=>2690, :name=>"Jewish Autonomous Oblast",:country_id=>192)
State.create(:id=>2691, :name=>"Amur",:country_id=>192)
State.create(:id=>2692, :name=>"Buryatiya",:country_id=>192)
State.create(:id=>2693, :name=>"Sakhalin",:country_id=>192)
State.create(:id=>2694, :name=>"Magadan",:country_id=>192)
State.create(:id=>2695, :name=>"Kamtsjatka",:country_id=>192)
State.create(:id=>2696, :name=>"Chukotskiy Avtonomnyy Okrug",:country_id=>192)
State.create(:id=>2697, :name=>"<NAME>",:country_id=>192)
# country_id 193 — presumably Rwanda
State.create(:id=>2698, :name=>"Eastern Province",:country_id=>193)
State.create(:id=>2699, :name=>"Kigali City",:country_id=>193)
State.create(:id=>2700, :name=>"Northern Province",:country_id=>193)
State.create(:id=>2701, :name=>"Western Province",:country_id=>193)
State.create(:id=>2702, :name=>"Southern Province",:country_id=>193)
# country_id 194 — presumably Saudi Arabia
State.create(:id=>2703, :name=>"Tabūk",:country_id=>194)
State.create(:id=>2704, :name=>"Najrān",:country_id=>194)
State.create(:id=>2705, :name=>"Makkah",:country_id=>194)
State.create(:id=>2706, :name=>"Jīzān",:country_id=>194)
State.create(:id=>2707, :name=>"Ḩāʼil",:country_id=>194)
State.create(:id=>2708, :name=>"Minţaqat ‘Asīr",:country_id=>194)
State.create(:id=>2709, :name=>"<NAME>",:country_id=>194)
State.create(:id=>2710, :name=>"Ar Riyāḑ",:country_id=>194)
State.create(:id=>2711, :name=>"Al Qaşīm",:country_id=>194)
State.create(:id=>2712, :name=>"Al Madīnah",:country_id=>194)
State.create(:id=>2713, :name=>"Al Jawf",:country_id=>194)
State.create(:id=>2714, :name=>"Northern Borders Region",:country_id=>194)
State.create(:id=>2715, :name=>"Al Bāḩah",:country_id=>194)
# country_id 195 — presumably Solomon Islands
State.create(:id=>2716, :name=>"Western Province",:country_id=>195)
State.create(:id=>2717, :name=>"Malaita",:country_id=>195)
State.create(:id=>2718, :name=>"Isabel",:country_id=>195)
State.create(:id=>2719, :name=>"Guadalcanal",:country_id=>195)
State.create(:id=>2720, :name=>"Central Province",:country_id=>195)
State.create(:id=>2721, :name=>"Temotu",:country_id=>195)
State.create(:id=>2722, :name=>"Makira",:country_id=>195)
State.create(:id=>2723, :name=>"Choiseul",:country_id=>195)
State.create(:id=>2724, :name=>"Rennell and Bellona",:country_id=>195)
# country_id 196 — presumably Seychelles
State.create(:id=>2725, :name=>"Takamaka",:country_id=>196)
State.create(:id=>2726, :name=>"Saint Louis",:country_id=>196)
State.create(:id=>2727, :name=>"Port Glaud",:country_id=>196)
State.create(:id=>2728, :name=>"Pointe Larue",:country_id=>196)
State.create(:id=>2729, :name=>"Plaisance",:country_id=>196)
State.create(:id=>2730, :name=>"<NAME>",:country_id=>196)
State.create(:id=>2731, :name=>"<NAME>",:country_id=>196)
State.create(:id=>2732, :name=>"English River",:country_id=>196)
State.create(:id=>2733, :name=>"Inner Islands",:country_id=>196)
State.create(:id=>2734, :name=>"Grand Anse Mahe",:country_id=>196)
State.create(:id=>2735, :name=>"Grand Anse Praslin",:country_id=>196)
State.create(:id=>2736, :name=>"Glacis",:country_id=>196)
State.create(:id=>2737, :name=>"Cascade",:country_id=>196)
State.create(:id=>2738, :name=>"Bel Ombre",:country_id=>196)
State.create(:id=>2739, :name=>"Bel Air",:country_id=>196)
State.create(:id=>2740, :name=>"<NAME>",:country_id=>196)
State.create(:id=>2741, :name=>"<NAME>",:country_id=>196)
State.create(:id=>2742, :name=>"<NAME>",:country_id=>196)
State.create(:id=>2743, :name=>"<NAME>",:country_id=>196)
State.create(:id=>2744, :name=>"<NAME>",:country_id=>196)
State.create(:id=>2745, :name=>"<NAME>",:country_id=>196)
State.create(:id=>2746, :name=>"<NAME>",:country_id=>196)
State.create(:id=>2747, :name=>"Les Mamelles",:country_id=>196)
State.create(:id=>2748, :name=>"<NAME>",:country_id=>196)
State.create(:id=>2749, :name=>"Au Cap",:country_id=>196)
# country_id 197 — presumably Sudan (includes pre-secession South Sudan states)
State.create(:id=>2750, :name=>"Northern",:country_id=>197)
State.create(:id=>2751, :name=>"Khartoum",:country_id=>197)
State.create(:id=>2752, :name=>"Upper Nile State",:country_id=>197)
State.create(:id=>2753, :name=>"Red Sea",:country_id=>197)
State.create(:id=>2754, :name=>"Lakes State",:country_id=>197)
State.create(:id=>2755, :name=>"Gezira",:country_id=>197)
State.create(:id=>2756, :name=>"Gedaref",:country_id=>197)
State.create(:id=>2757, :name=>"Unity",:country_id=>197)
State.create(:id=>2758, :name=>"White Nile",:country_id=>197)
State.create(:id=>2759, :name=>"Blue Nile State",:country_id=>197)
State.create(:id=>2760, :name=>"<NAME>",:country_id=>197)
State.create(:id=>2761, :name=>"Western Equatoria",:country_id=>197)
State.create(:id=>2762, :name=>"Western Bahr al Ghazal",:country_id=>197)
State.create(:id=>2763, :name=>"Western Darfur State",:country_id=>197)
State.create(:id=>2764, :name=>"Southern Darfur",:country_id=>197)
State.create(:id=>2765, :name=>"Southern Kordofan State",:country_id=>197)
State.create(:id=>2766, :name=>"Jonglei State",:country_id=>197)
State.create(:id=>2767, :name=>"Kassala State",:country_id=>197)
State.create(:id=>2768, :name=>"River Nile State",:country_id=>197)
State.create(:id=>2769, :name=>"Northern Bahr el Ghazal State",:country_id=>197)
State.create(:id=>2770, :name=>"Northern Darfur",:country_id=>197)
State.create(:id=>2771, :name=>"Northern Kordofan",:country_id=>197)
State.create(:id=>2772, :name=>"Eastern Equatoria",:country_id=>197)
State.create(:id=>2773, :name=>"Sinnar State",:country_id=>197)
State.create(:id=>2774, :name=>"Warab State",:country_id=>197)
# country_id 198 — presumably Sweden
State.create(:id=>2775, :name=>"Norrbotten",:country_id=>198)
State.create(:id=>2776, :name=>"Västmanland",:country_id=>198)
State.create(:id=>2777, :name=>"Västernorrland",:country_id=>198)
State.create(:id=>2778, :name=>"Västerbotten",:country_id=>198)
State.create(:id=>2779, :name=>"Värmland",:country_id=>198)
State.create(:id=>2780, :name=>"Uppsala",:country_id=>198)
State.create(:id=>2781, :name=>"Stockholm",:country_id=>198)
State.create(:id=>2782, :name=>"Södermanland",:country_id=>198)
State.create(:id=>2783, :name=>"Östergötland",:country_id=>198)
State.create(:id=>2784, :name=>"Örebro",:country_id=>198)
State.create(:id=>2785, :name=>"Kronoberg",:country_id=>198)
State.create(:id=>2786, :name=>"Dalarna",:country_id=>198)
State.create(:id=>2787, :name=>"Kalmar",:country_id=>198)
State.create(:id=>2788, :name=>"Jönköping",:country_id=>198)
State.create(:id=>2789, :name=>"Jämtland",:country_id=>198)
State.create(:id=>2790, :name=>"Halland",:country_id=>198)
State.create(:id=>2791, :name=>"Gotland",:country_id=>198)
State.create(:id=>2792, :name=>"Gävleborg",:country_id=>198)
State.create(:id=>2793, :name=>"Blekinge",:country_id=>198)
State.create(:id=>2794, :name=>"Skåne",:country_id=>198)
State.create(:id=>2795, :name=>"Västra Götaland",:country_id=>198)
# country_id 199 — presumably Singapore
State.create(:id=>2796, :name=>"Central Singapore",:country_id=>199)
State.create(:id=>2797, :name=>"North East",:country_id=>199)
State.create(:id=>2798, :name=>"South East",:country_id=>199)
State.create(:id=>2799, :name=>"South West",:country_id=>199)
State.create(:id=>2800, :name=>"North West",:country_id=>199)
# country_id 200 — presumably Saint Helena, Ascension and Tristan da Cunha
State.create(:id=>2801, :name=>"Ascension",:country_id=>200)
State.create(:id=>2802, :name=>"<NAME>",:country_id=>200)
State.create(:id=>2803, :name=>"<NAME>",:country_id=>200)
# country_id 201 — presumably Slovenia (municipalities)
State.create(:id=>2804, :name=>"Žalec",:country_id=>201)
State.create(:id=>2805, :name=>"<NAME>",:country_id=>201)
State.create(:id=>2806, :name=>"Vrhnika",:country_id=>201)
State.create(:id=>2807, :name=>"Tržič",:country_id=>201)
State.create(:id=>2808, :name=>"Trebnje",:country_id=>201)
State.create(:id=>2809, :name=>"Trbovlje",:country_id=>201)
State.create(:id=>2810, :name=>"Tolmin",:country_id=>201)
State.create(:id=>2811, :name=>"Velenje",:country_id=>201)
State.create(:id=>2812, :name=>"<NAME>",:country_id=>201)
State.create(:id=>2813, :name=>"<NAME>",:country_id=>201)
State.create(:id=>2814, :name=>"<NAME>",:country_id=>201)
State.create(:id=>2815, :name=>"<NAME>",:country_id=>201)
State.create(:id=>2816, :name=>"<NAME>",:country_id=>201)
State.create(:id=>2817, :name=>"Sežana",:country_id=>201)
State.create(:id=>2818, :name=>"Sevnica",:country_id=>201)
State.create(:id=>2819, :name=>"<NAME>",:country_id=>201)
State.create(:id=>2820, :name=>"Ribnica",:country_id=>201)
State.create(:id=>2821, :name=>"Radovljica",:country_id=>201)
State.create(:id=>2822, :name=>"<NAME>",:country_id=>201)
State.create(:id=>2823, :name=>"Ptuj",:country_id=>201)
State.create(:id=>2824, :name=>"Postojna",:country_id=>201)
State.create(:id=>2825, :name=>"Piran-Pirano",:country_id=>201)
State.create(:id=>2826, :name=>"Ormož",:country_id=>201)
State.create(:id=>2827, :name=>"<NAME>",:country_id=>201)
State.create(:id=>2828, :name=>"<NAME>",:country_id=>201)
State.create(:id=>2829, :name=>"<NAME>",:country_id=>201)
State.create(:id=>2830, :name=>"Mozirje",:country_id=>201)
State.create(:id=>2831, :name=>"Metlika",:country_id=>201)
State.create(:id=>2832, :name=>"Maribor",:country_id=>201)
State.create(:id=>2833, :name=>"Logatec",:country_id=>201)
State.create(:id=>2834, :name=>"Ljutomer",:country_id=>201)
State.create(:id=>2835, :name=>"Litija",:country_id=>201)
State.create(:id=>2836, :name=>"Lenart",:country_id=>201)
State.create(:id=>2837, :name=>"Laško",:country_id=>201)
State.create(:id=>2838, :name=>"Krško",:country_id=>201)
State.create(:id=>2839, :name=>"Kranj",:country_id=>201)
State.create(:id=>2840, :name=>"Koper-Capodistria",:country_id=>201)
State.create(:id=>2841, :name=>"Kočevje",:country_id=>201)
State.create(:id=>2842, :name=>"Kamnik",:country_id=>201)
State.create(:id=>2843, :name=>"Jesenice",:country_id=>201)
State.create(:id=>2844, :name=>"Izola", :country_id=>201)
State.create(:id=>2845, :name=>"<NAME>",:country_id=>201)
State.create(:id=>2846, :name=>"Idrija",:country_id=>201)
State.create(:id=>2847, :name=>"Hrastnik",:country_id=>201)
State.create(:id=>2848, :name=>"Grosuplje",:country_id=>201)
State.create(:id=>2849, :name=>"<NAME>",:country_id=>201)
State.create(:id=>2850, :name=>"Dravograd",:country_id=>201)
State.create(:id=>2851, :name=>"Domžale",:country_id=>201)
State.create(:id=>2852, :name=>"Črnomelj",:country_id=>201)
State.create(:id=>2853, :name=>"Cerknica",:country_id=>201)
State.create(:id=>2854, :name=>"Celje",:country_id=>201)
State.create(:id=>2855, :name=>"Brežice",:country_id=>201)
State.create(:id=>2856, :name=>"Ajdovščina",:country_id=>201)
State.create(:id=>2857, :name=>"Hrpelje-Kozina",:country_id=>201)
State.create(:id=>2858, :name=>"Divača",:country_id=>201)
State.create(:id=>2859, :name=>"Pivka",:country_id=>201)
State.create(:id=>2860, :name=>"<NAME>",:country_id=>201)
State.create(:id=>2861, :name=>"<NAME>",:country_id=>201)
State.create(:id=>2862, :name=>"Osilnica",:country_id=>201)
State.create(:id=>2863, :name=>"<NAME>",:country_id=>201)
State.create(:id=>2864, :name=>"Škofljica",:country_id=>201)
State.create(:id=>2865, :name=>"Ig",:country_id=>201)
State.create(:id=>2866, :name=>"Brezovica",:country_id=>201)
State.create(:id=>2867, :name=>"Borovnica",:country_id=>201)
State.create(:id=>2868, :name=>"Vipava",:country_id=>201)
State.create(:id=>2869, :name=>"Komen",:country_id=>201)
State.create(:id=>2870, :name=>"Miren-Kostanjevica",:country_id=>201)
State.create(:id=>2871, :name=>"Brda",:country_id=>201)
State.create(:id=>2872, :name=>"Kanal",:country_id=>201)
State.create(:id=>2873, :name=>"Žiri",:country_id=>201)
State.create(:id=>2874, :name=>"Cerkno",:country_id=>201)
State.create(:id=>2875, :name=>"Železniki",:country_id=>201)
State.create(:id=>2876, :name=>"<NAME>",:country_id=>201)
State.create(:id=>2877, :name=>"<NAME>",:country_id=>201)
State.create(:id=>2878, :name=>"Kobarid",:country_id=>201)
State.create(:id=>2879, :name=>"Bovec",:country_id=>201)
State.create(:id=>2880, :name=>"Bohinj",:country_id=>201)
State.create(:id=>2881, :name=>"Bled",:country_id=>201)
State.create(:id=>2882, :name=>"Naklo",:country_id=>201)
State.create(:id=>2883, :name=>"<NAME>",:country_id=>201)
State.create(:id=>2884, :name=>"Preddvor",:country_id=>201)
State.create(:id=>2885, :name=>"Cerklje Na Gorenjskem",:country_id=>201)
State.create(:id=>2886, :name=>"Šenčur",:country_id=>201)
State.create(:id=>2887, :name=>"Vodice",:country_id=>201)
State.create(:id=>2888, :name=>"Medvode",:country_id=>201)
State.create(:id=>2889, :name=>"Mengeš",:country_id=>201)
State.create(:id=>2890, :name=>"<NAME>",:country_id=>201)
State.create(:id=>2891, :name=>"Moravče",:country_id=>201)
State.create(:id=>2892, :name=>"<NAME>",:country_id=>201)
State.create(:id=>2893, :name=>"Luče",:country_id=>201)
State.create(:id=>2894, :name=>"Ravne na Koroškem",:country_id=>201)
State.create(:id=>2895, :name=>"Mežica",:country_id=>201)
State.create(:id=>2896, :name=>"Muta",:country_id=>201)
State.create(:id=>2897, :name=>"Vuzenica",:country_id=>201)
State.create(:id=>2898, :name=>"Črna na Koroškem",:country_id=>201)
State.create(:id=>2899, :name=>"Ljubno",:country_id=>201)
State.create(:id=>2900, :name=>"Šoštanj",:country_id=>201)
State.create(:id=>2901, :name=>"Šmartno ob Paki",:country_id=>201)
State.create(:id=>2902, :name=>"Lukovica",:country_id=>201)
State.create(:id=>2903, :name=>"Radeče",:country_id=>201)
State.create(:id=>2904, :name=>"<NAME>",:country_id=>201)
State.create(:id=>2905, :name=>"Dobrepolje",:country_id=>201)
State.create(:id=>2906, :name=>"Semič",:country_id=>201)
State.create(:id=>2907, :name=>"Šentjernej",:country_id=>201)
State.create(:id=>2908, :name=>"Škocjan",:country_id=>201)
State.create(:id=>2909, :name=>"Štore",:country_id=>201)
State.create(:id=>2910, :name=>"Vojnik",:country_id=>201)
State.create(:id=>2911, :name=>"Vitanje",:country_id=>201)
State.create(:id=>2912, :name=>"Zreče",:country_id=>201)
State.create(:id=>2913, :name=>"Mislinja",:country_id=>201)
State.create(:id=>2914, :name=>"Ruše",:country_id=>201)
State.create(:id=>2915, :name=>"Kungota",:country_id=>201)
State.create(:id=>2916, :name=>"Šentilj",:country_id=>201)
State.create(:id=>2917, :name=>"Pesnica",:country_id=>201)
State.create(:id=>2918, :name=>"Duplek",:country_id=>201)
State.create(:id=>2919, :name=>"Rače-Fram",:country_id=>201)
State.create(:id=>2920, :name=>"Starše",:country_id=>201)
State.create(:id=>2921, :name=>"Kidričevo",:country_id=>201)
State.create(:id=>2922, :name=>"Majšperk",:country_id=>201)
State.create(:id=>2923, :name=>"Videm",:country_id=>201)
State.create(:id=>2924, :name=>"<NAME>",:country_id=>201)
State.create(:id=>2925, :name=>"Rogatec",:country_id=>201)
State.create(:id=>2926, :name=>"Podčetrtek",:country_id=>201)
State.create(:id=>2927, :name=>"Kozje",:country_id=>201)
State.create(:id=>2928, :name=>"Gorišnica",:country_id=>201)
State.create(:id=>2929, :name=>"Zavrč",:country_id=>201)
State.create(:id=>2930, :name=>"Dornava",:country_id=>201)
State.create(:id=>2931, :name=>"Juršinci",:country_id=>201)
State.create(:id=>2932, :name=>"<NAME>",:country_id=>201)
State.create(:id=>2933, :name=>"Radenci",:country_id=>201)
State.create(:id=>2934, :name=>"Puconci",:country_id=>201)
State.create(:id=>2935, :name=>"Rogašovci",:country_id=>201)
State.create(:id=>2936, :name=>"Kuzma",:country_id=>201)
State.create(:id=>2937, :name=>"<NAME>",:country_id=>201)
State.create(:id=>2938, :name=>"<NAME>",:country_id=>201)
State.create(:id=>2939, :name=>"Kobilje",:country_id=>201)
State.create(:id=>2940, :name=>"Beltinci",:country_id=>201)
State.create(:id=>2941, :name=>"Turnišče",:country_id=>201)
State.create(:id=>2942, :name=>"Odranci",:country_id=>201)
State.create(:id=>2943, :name=>"Črenšovci",:country_id=>201)
State.create(:id=>2944, :name=>"Nazarje",:country_id=>201)
State.create(:id=>2945, :name=>"Ljubljana",:country_id=>201)
State.create(:id=>2946, :name=>"Žirovnica",:country_id=>201)
State.create(:id=>2947, :name=>"Jezersko",:country_id=>201)
State.create(:id=>2948, :name=>"Solčava",:country_id=>201)
State.create(:id=>2949, :name=>"Komenda",:country_id=>201)
State.create(:id=>2950, :name=>"Horjul",:country_id=>201)
State.create(:id=>2951, :name=>"Šempeter-Vrtojba",:country_id=>201)
State.create(:id=>2952, :name=>"Bloke",:country_id=>201)
State.create(:id=>2953, :name=>"Sodražica",:country_id=>201)
State.create(:id=>2954, :name=>"Trzin",:country_id=>201)
State.create(:id=>2955, :name=>"Prevalje",:country_id=>201)
State.create(:id=>2956, :name=>"Vransko",:country_id=>201)
State.create(:id=>2957, :name=>"Tabor",:country_id=>201)
State.create(:id=>2958, :name=>"Braslovče",:country_id=>201)
State.create(:id=>2959, :name=>"Polzela",:country_id=>201)
State.create(:id=>2960, :name=>"Prebold",:country_id=>201)
State.create(:id=>2961, :name=>"Kostel",:country_id=>201)
State.create(:id=>2962, :name=>"Žužemberk",:country_id=>201)
State.create(:id=>2963, :name=>"Dolenjske Toplice",:country_id=>201)
State.create(:id=>2964, :name=>"<NAME>",:country_id=>201)
State.create(:id=>2965, :name=>"Bistrica ob Sotli",:country_id=>201)
State.create(:id=>2966, :name=>"Dobje",:country_id=>201)
State.create(:id=>2967, :name=>"Dobrna",:country_id=>201)
State.create(:id=>2968, :name=>"Oplotnica",:country_id=>201)
State.create(:id=>2969, :name=>"Podvelka",:country_id=>201)
State.create(:id=>2970, :name=>"Ribnica na Pohorju",:country_id=>201)
State.create(:id=>2971, :name=>"Lovrenc na Pohorju",:country_id=>201)
State.create(:id=>2972, :name=>"Selnica ob Dravi",:country_id=>201)
State.create(:id=>2973, :name=>"Hoče-Slivnica",:country_id=>201)
State.create(:id=>2974, :name=>"Miklavž na Dravskem Polju",:country_id=>201)
State.create(:id=>2975, :name=>"Hajdina",:country_id=>201)
State.create(:id=>2976, :name=>"Žetale",:country_id=>201)
State.create(:id=>2977, :name=>"Podlehnik",:country_id=>201)
State.create(:id=>2978, :name=>"Markovci",:country_id=>201)
State.create(:id=>2979, :name=>"Destrnik",:country_id=>201)
State.create(:id=>2980, :name=>"<NAME>",:country_id=>201)
State.create(:id=>2981, :name=>"<NAME> <NAME>",:country_id=>201)
State.create(:id=>2982, :name=>"Cerkvenjak",:country_id=>201)
State.create(:id=>2983, :name=>"Benedikt",:country_id=>201)
State.create(:id=>2984, :name=>"<NAME>",:country_id=>201)
State.create(:id=>2985, :name=>"Križevci",:country_id=>201)
State.create(:id=>2986, :name=>"Veržej",:country_id=>201)
State.create(:id=>2987, :name=>"<NAME>",:country_id=>201)
State.create(:id=>2988, :name=>"Lendava-Lendva",:country_id=>201)
State.create(:id=>2989, :name=>"Dobrovnik-Dobronak",:country_id=>201)
State.create(:id=>2990, :name=>"Tišina",:country_id=>201)
State.create(:id=>2991, :name=>"Cankova",:country_id=>201)
State.create(:id=>2992, :name=>"Grad",:country_id=>201)
State.create(:id=>2993, :name=>"Hodoš-Hodos",:country_id=>201)
State.create(:id=>2994, :name=>"Razkrižje",:country_id=>201)
State.create(:id=>2995, :name=>"<NAME>",:country_id=>201)
State.create(:id=>2996, :name=>"Šalovci",:country_id=>201)
State.create(:id=>2997, :name=>"<NAME>",:country_id=>202)
State.create(:id=>2998, :name=>"Svalbard",:country_id=>202)
State.create(:id=>2999, :name=>"Košický",:country_id=>203)
State.create(:id=>3000, :name=>"Prešovský",:country_id=>203)
State.create(:id=>3001, :name=>"Žilinský",:country_id=>203)
State.create(:id=>3002, :name=>"Banskobystrický",:country_id=>203)
State.create(:id=>3003, :name=>"Bratislavský",:country_id=>203)
State.create(:id=>3004, :name=>"Nitriansky",:country_id=>203)
State.create(:id=>3005, :name=>"Trenčiansky",:country_id=>203)
State.create(:id=>3006, :name=>"Trnavský",:country_id=>203)
State.create(:id=>3007, :name=>"Western Area",:country_id=>204)
State.create(:id=>3008, :name=>"Southern Province",:country_id=>204)
State.create(:id=>3009, :name=>"Northern Province",:country_id=>204)
State.create(:id=>3010, :name=>"Eastern Province",:country_id=>204)
State.create(:id=>3011, :name=>"Serravalle",:country_id=>205)
State.create(:id=>3012, :name=>"Chiesanuova",:country_id=>205)
State.create(:id=>3013, :name=>"San Marino",:country_id=>205)
State.create(:id=>3014, :name=>"Acquaviva",:country_id=>205)
State.create(:id=>3015, :name=>"<NAME>",:country_id=>205)
State.create(:id=>3016, :name=>"Domagnano",:country_id=>205)
State.create(:id=>3017, :name=>"Faetano",:country_id=>205)
State.create(:id=>3018, :name=>"Fiorentino",:country_id=>205)
State.create(:id=>3019, :name=>"Montegiardino",:country_id=>205)
State.create(:id=>3020, :name=>"Ziguinchor",:country_id=>206)
State.create(:id=>3021, :name=>"Thiès",:country_id=>206)
State.create(:id=>3022, :name=>"Tambacounda",:country_id=>206)
State.create(:id=>3023, :name=>"Saint-Louis",:country_id=>206)
State.create(:id=>3024, :name=>"Matam",:country_id=>206)
State.create(:id=>3025, :name=>"Louga",:country_id=>206)
State.create(:id=>3026, :name=>"Kolda",:country_id=>206)
State.create(:id=>3027, :name=>"Kaolack",:country_id=>206)
State.create(:id=>3028, :name=>"Fatick",:country_id=>206)
State.create(:id=>3029, :name=>"Diourbel",:country_id=>206)
State.create(:id=>3030, :name=>"Dakar",:country_id=>206)
State.create(:id=>3031, :name=>"Kaffrine",:country_id=>206)
State.create(:id=>3032, :name=>"Kédougou",:country_id=>206)
State.create(:id=>3033, :name=>"Sédhiou",:country_id=>206)
State.create(:id=>3034, :name=>"<NAME>",:country_id=>207)
State.create(:id=>3035, :name=>"Togdheer",:country_id=>207)
State.create(:id=>3036, :name=>"<NAME>",:country_id=>207)
State.create(:id=>3037, :name=>"Middle Shabele",:country_id=>207)
State.create(:id=>3038, :name=>"Sanaag",:country_id=>207)
State.create(:id=>3039, :name=>"Nugaal",:country_id=>207)
State.create(:id=>3040, :name=>"Mudug",:country_id=>207)
State.create(:id=>3041, :name=>"Lower Juba",:country_id=>207)
State.create(:id=>3042, :name=>"Middle Juba",:country_id=>207)
State.create(:id=>3043, :name=>"Hiiraan",:country_id=>207)
State.create(:id=>3044, :name=>"Gedo",:country_id=>207)
State.create(:id=>3045, :name=>"Galguduud",:country_id=>207)
State.create(:id=>3046, :name=>"Bay",:country_id=>207)
State.create(:id=>3047, :name=>"Bari",:country_id=>207)
State.create(:id=>3048, :name=>"Banaadir",:country_id=>207)
State.create(:id=>3049, :name=>"Bakool",:country_id=>207)
State.create(:id=>3050, :name=>"Awdal",:country_id=>207)
State.create(:id=>3051, :name=>"Sool",:country_id=>207)
State.create(:id=>3052, :name=>"Wanica",:country_id=>208)
State.create(:id=>3053, :name=>"Sipaliwini",:country_id=>208)
State.create(:id=>3054, :name=>"Saramacca",:country_id=>208)
State.create(:id=>3055, :name=>"Paramaribo",:country_id=>208)
State.create(:id=>3056, :name=>"Para",:country_id=>208)
State.create(:id=>3057, :name=>"Nickerie",:country_id=>208)
State.create(:id=>3058, :name=>"Marowijne",:country_id=>208)
State.create(:id=>3059, :name=>"Coronie",:country_id=>208)
State.create(:id=>3060, :name=>"Commewijne",:country_id=>208)
State.create(:id=>3061, :name=>"Brokopondo",:country_id=>208)
State.create(:id=>3062, :name=>"<NAME>",:country_id=>209)
State.create(:id=>3063, :name=>"Príncipe",:country_id=>209)
State.create(:id=>3064, :name=>"Usulután",:country_id=>210)
State.create(:id=>3065, :name=>"Sonsonate",:country_id=>210)
State.create(:id=>3066, :name=>"San Vicente",:country_id=>210)
State.create(:id=>3067, :name=>"<NAME>",:country_id=>210)
State.create(:id=>3068, :name=>"San Salvador",:country_id=>210)
State.create(:id=>3069, :name=>"San Miguel",:country_id=>210)
State.create(:id=>3070, :name=>"Morazán",:country_id=>210)
State.create(:id=>3071, :name=>"La Unión",:country_id=>210)
State.create(:id=>3072, :name=>"La Paz",:country_id=>210)
State.create(:id=>3073, :name=>"La Libertad",:country_id=>210)
State.create(:id=>3074, :name=>"Cuscatlán",:country_id=>210)
State.create(:id=>3075, :name=>"Chalatenango",:country_id=>210)
State.create(:id=>3076, :name=>"Cabañas",:country_id=>210)
State.create(:id=>3077, :name=>"Ahuachapán",:country_id=>210)
State.create(:id=>3078, :name=>"Tartus",:country_id=>212)
State.create(:id=>3079, :name=>"Damascus City",:country_id=>212)
State.create(:id=>3080, :name=>"Idlib",:country_id=>212)
State.create(:id=>3081, :name=>"Homs",:country_id=>212)
State.create(:id=>3082, :name=>"Hama",:country_id=>212)
State.create(:id=>3083, :name=>"Aleppo",:country_id=>212)
State.create(:id=>3084, :name=>"Rif-dimashq",:country_id=>212)
State.create(:id=>3085, :name=>"<NAME>",:country_id=>212)
State.create(:id=>3086, :name=>"Daraa",:country_id=>212)
State.create(:id=>3087, :name=>"As-Suwayda",:country_id=>212)
State.create(:id=>3088, :name=>"Ar-Raqqah",:country_id=>212)
State.create(:id=>3089, :name=>"Quneitra",:country_id=>212)
State.create(:id=>3090, :name=>"Latakia",:country_id=>212)
State.create(:id=>3091, :name=>"Al-Hasakah",:country_id=>212)
State.create(:id=>3092, :name=>"Shiselweni",:country_id=>213)
State.create(:id=>3093, :name=>"Manzini",:country_id=>213)
State.create(:id=>3094, :name=>"Lubombo",:country_id=>213)
State.create(:id=>3095, :name=>"Hhohho",:country_id=>213)
State.create(:id=>3096, :name=>"Salamat",:country_id=>215)
State.create(:id=>3097, :name=>"Ouaddaï",:country_id=>215)
State.create(:id=>3098, :name=>"Biltine",:country_id=>215)
State.create(:id=>3099, :name=>"Tandjilé",:country_id=>215)
State.create(:id=>3100, :name=>"Moyen-Chari",:country_id=>215)
State.create(:id=>3101, :name=>"Mayo-Kébbi",:country_id=>215)
State.create(:id=>3102, :name=>"Logone Oriental",:country_id=>215)
State.create(:id=>3103, :name=>"Logone Occidental",:country_id=>215)
State.create(:id=>3104, :name=>"Lac",:country_id=>215)
State.create(:id=>3105, :name=>"Kanem",:country_id=>215)
State.create(:id=>3106, :name=>"Guéra",:country_id=>215)
State.create(:id=>3107, :name=>"Chari-Baguirmi",:country_id=>215)
State.create(:id=>3108, :name=>"Batha",:country_id=>215)
State.create(:id=>3109, :name=>"Région du Borkou",:country_id=>215)
State.create(:id=>3110, :name=>"Région du Hadjer-Lamis",:country_id=>215)
State.create(:id=>3111, :name=>"Région du Mandoul",:country_id=>215)
State.create(:id=>3112, :name=>"Région du Mayo-Kébbi Ouest",:country_id=>215)
State.create(:id=>3113, :name=>"Région de la Ville de N'Djaména",:country_id=>215)
State.create(:id=>3114, :name=>"Région du Barh el Gazel",:country_id=>215)
State.create(:id=>3115, :name=>"Ennedi",:country_id=>215)
State.create(:id=>3116, :name=>"Région du Sila",:country_id=>215)
State.create(:id=>3117, :name=>"Région du Tibesti",:country_id=>215)
State.create(:id=>3118, :name=>"Crozet",:country_id=>216)
State.create(:id=>3119, :name=>"Kerguelen",:country_id=>216)
State.create(:id=>3120, :name=>"Saint-Paul-et-Amsterdam",:country_id=>216)
State.create(:id=>3121, :name=>"<NAME>",:country_id=>216)
State.create(:id=>3122, :name=>"Terre-Adélie",:country_id=>216)
State.create(:id=>3123, :name=>"Savanes",:country_id=>217)
State.create(:id=>3124, :name=>"Plateaux",:country_id=>217)
State.create(:id=>3125, :name=>"Maritime",:country_id=>217)
State.create(:id=>3126, :name=>"Centrale",:country_id=>217)
State.create(:id=>3127, :name=>"Kara",:country_id=>217)
State.create(:id=>3128, :name=>"<NAME>",:country_id=>218)
State.create(:id=>3129, :name=>"Trang",:country_id=>218)
State.create(:id=>3130, :name=>"Tak",:country_id=>218)
State.create(:id=>3131, :name=>"<NAME>",:country_id=>218)
State.create(:id=>3132, :name=>"Sukhothai",:country_id=>218)
State.create(:id=>3133, :name=>"Ratchaburi",:country_id=>218)
State.create(:id=>3134, :name=>"Ranong",:country_id=>218)
State.create(:id=>3135, :name=>"<NAME>",:country_id=>218)
State.create(:id=>3136, :name=>"<NAME>",:country_id=>218)
State.create(:id=>3137, :name=>"Phetchaburi",:country_id=>218)
State.create(:id=>3138, :name=>"Phangnga",:country_id=>218)
State.create(:id=>3139, :name=>"<NAME>",:country_id=>218)
State.create(:id=>3140, :name=>"Lamphun",:country_id=>218)
State.create(:id=>3141, :name=>"Lampang",:country_id=>218)
State.create(:id=>3142, :name=>"Krabi",:country_id=>218)
State.create(:id=>3143, :name=>"Kanchanaburi",:country_id=>218)
State.create(:id=>3144, :name=>"<NAME>",:country_id=>218)
State.create(:id=>3145, :name=>"Chumphon",:country_id=>218)
State.create(:id=>3146, :name=>"<NAME>",:country_id=>218)
State.create(:id=>3147, :name=>"<NAME>",:country_id=>218)
State.create(:id=>3148, :name=>"Yasothon",:country_id=>218)
State.create(:id=>3149, :name=>"Yala",:country_id=>218)
State.create(:id=>3150, :name=>"Uttaradit",:country_id=>218)
State.create(:id=>3151, :name=>"Trat",:country_id=>218)
State.create(:id=>3152, :name=>"Surin",:country_id=>218)
State.create(:id=>3153, :name=>"<NAME>",:country_id=>218)
State.create(:id=>3154, :name=>"Songkhla",:country_id=>218)
State.create(:id=>3155, :name=>"Sisaket",:country_id=>218)
State.create(:id=>3156, :name=>"<NAME>",:country_id=>218)
State.create(:id=>3157, :name=>"Satun",:country_id=>218)
State.create(:id=>3158, :name=>"<NAME>",:country_id=>218)
State.create(:id=>3159, :name=>"<NAME>",:country_id=>218)
State.create(:id=>3160, :name=>"<NAME>",:country_id=>218)
State.create(:id=>3161, :name=>"<NAME>",:country_id=>218)
State.create(:id=>3162, :name=>"<NAME>",:country_id=>218)
State.create(:id=>3163, :name=>"<NAME>",:country_id=>218)
State.create(:id=>3164, :name=>"Rayong",:country_id=>218)
State.create(:id=>3165, :name=>"<NAME>",:country_id=>218)
State.create(:id=>3166, :name=>"Phrae",:country_id=>218)
State.create(:id=>3167, :name=>"Phitsanulok",:country_id=>218)
State.create(:id=>3168, :name=>"Phichit",:country_id=>218)
State.create(:id=>3169, :name=>"Phetchabun",:country_id=>218)
State.create(:id=>3170, :name=>"Phayao",:country_id=>218)
State.create(:id=>3171, :name=>"Phatthalung",:country_id=>218)
State.create(:id=>3172, :name=>"Pattani",:country_id=>218)
State.create(:id=>3173, :name=>"<NAME>",:country_id=>218)
State.create(:id=>3174, :name=>"Nonthaburi",:country_id=>218)
State.create(:id=>3175, :name=>"<NAME>",:country_id=>218)
State.create(:id=>3176, :name=>"Narathiwat",:country_id=>218)
State.create(:id=>3177, :name=>"Nan",:country_id=>218)
State.create(:id=>3178, :name=>"<NAME>",:country_id=>218)
State.create(:id=>3179, :name=>"<NAME>",:country_id=>218)
State.create(:id=>3180, :name=>"<NAME>",:country_id=>218)
State.create(:id=>3181, :name=>"<NAME>",:country_id=>218)
State.create(:id=>3182, :name=>"<NAME>",:country_id=>218)
State.create(:id=>3183, :name=>"<NAME>",:country_id=>218)
State.create(:id=>3184, :name=>"Mukdahan",:country_id=>218)
State.create(:id=>3185, :name=>"<NAME>",:country_id=>218)
State.create(:id=>3186, :name=>"<NAME>",:country_id=>218)
State.create(:id=>3187, :name=>"Loei",:country_id=>218)
State.create(:id=>3188, :name=>"Bangkok",:country_id=>218)
State.create(:id=>3189, :name=>"<NAME>",:country_id=>218)
State.create(:id=>3190, :name=>"Kalasin",:country_id=>218)
State.create(:id=>3191, :name=>"<NAME>",:country_id=>218)
State.create(:id=>3192, :name=>"Chanthaburi",:country_id=>218)
State.create(:id=>3193, :name=>"Chaiyaphum",:country_id=>218)
State.create(:id=>3194, :name=>"<NAME>",:country_id=>218)
State.create(:id=>3195, :name=>"Chachoengsao",:country_id=>218)
State.create(:id=>3196, :name=>"Buriram",:country_id=>218)
State.create(:id=>3197, :name=>"<NAME>",:country_id=>218)
State.create(:id=>3198, :name=>"<NAME>",:country_id=>218)
State.create(:id=>3199, :name=>"<NAME>",:country_id=>218)
State.create(:id=>3200, :name=>"<NAME>",:country_id=>218)
State.create(:id=>3201, :name=>"<NAME>",:country_id=>218)
State.create(:id=>3202, :name=>"<NAME>",:country_id=>218)
State.create(:id=>3203, :name=>"<NAME>",:country_id=>218)
State.create(:id=>3204, :name=>"Sughd",:country_id=>219)
State.create(:id=>3205, :name=>"Gorno-Badakhshan",:country_id=>219)
State.create(:id=>3206, :name=>"Khatlon",:country_id=>219)
State.create(:id=>3207, :name=>"Region of Republican Subordination",:country_id=>219)
State.create(:id=>3208, :name=>"Dushanbe",:country_id=>219)
State.create(:id=>3209, :name=>"Nukunonu",:country_id=>220)
State.create(:id=>3210, :name=>"Fakaofo",:country_id=>220)
State.create(:id=>3211, :name=>"Atafu",:country_id=>220)
State.create(:id=>3212, :name=>"Viqueque",:country_id=>221)
State.create(:id=>3213, :name=>"Manufahi",:country_id=>221)
State.create(:id=>3214, :name=>"Distrito Manatuto",:country_id=>221)
State.create(:id=>3215, :name=>"Distrito Liquiçá",:country_id=>221)
State.create(:id=>3216, :name=>"Distrito Lautém",:country_id=>221)
State.create(:id=>3217, :name=>"Distrito Cova Lima",:country_id=>221)
State.create(:id=>3218, :name=>"Ermera",:country_id=>221)
State.create(:id=>3219, :name=>"Distrito Díli",:country_id=>221)
State.create(:id=>3220, :name=>"Bobonaro",:country_id=>221)
State.create(:id=>3221, :name=>"Distrito Bacau",:country_id=>221)
State.create(:id=>3222, :name=>"Oecussi",:country_id=>221)
State.create(:id=>3223, :name=>"Distrito Ainaro",:country_id=>221)
State.create(:id=>3224, :name=>"Distrito Aileu",:country_id=>221)
State.create(:id=>3225, :name=>"Balkan",:country_id=>222)
State.create(:id=>3226, :name=>"Ahal",:country_id=>222)
State.create(:id=>3227, :name=>"Daşoguz",:country_id=>222)
State.create(:id=>3228, :name=>"Mary",:country_id=>222)
State.create(:id=>3229, :name=>"Lebap",:country_id=>222)
State.create(:id=>3230, :name=>"Zaghwān",:country_id=>223)
State.create(:id=>3231, :name=>"Tūnis",:country_id=>223)
State.create(:id=>3232, :name=>"Tawzar",:country_id=>223)
State.create(:id=>3233, :name=>"Taţāwīn",:country_id=>223)
State.create(:id=>3234, :name=>"Sūsah",:country_id=>223)
State.create(:id=>3235, :name=>"Silyānah",:country_id=>223)
State.create(:id=>3236, :name=>"<NAME>",:country_id=>223)
State.create(:id=>3237, :name=>"Şafāqis",:country_id=>223)
State.create(:id=>3238, :name=>"Qibilī",:country_id=>223)
State.create(:id=>3239, :name=>"Qafşah",:country_id=>223)
State.create(:id=>3240, :name=>"Qābis",:country_id=>223)
State.create(:id=>3241, :name=>"Nābul",:country_id=>223)
State.create(:id=>3242, :name=>"Madanīn",:country_id=>223)
State.create(:id=>3243, :name=>"Jundūbah",:country_id=>223)
State.create(:id=>3244, :name=>"Bin ‘Arūs",:country_id=>223)
State.create(:id=>3245, :name=>"Banzart",:country_id=>223)
State.create(:id=>3246, :name=>"Bājah",:country_id=>223)
State.create(:id=>3247, :name=>"Ariana",:country_id=>223)
State.create(:id=>3248, :name=>"Al Qayrawān",:country_id=>223)
State.create(:id=>3249, :name=>"Al Qaşrayn",:country_id=>223)
State.create(:id=>3250, :name=>"Al Munastīr",:country_id=>223)
State.create(:id=>3251, :name=>"Al Mahdīyah",:country_id=>223)
State.create(:id=>3252, :name=>"Kef",:country_id=>223)
State.create(:id=>3253, :name=>"Gouvernorat de la Manouba",:country_id=>223)
State.create(:id=>3254, :name=>"Vava`u",:country_id=>224)
State.create(:id=>3255, :name=>"Tongatapu",:country_id=>224)
State.create(:id=>3256, :name=>"Ha`apai",:country_id=>224)
State.create(:id=>3257, :name=>"Eua",:country_id=>224)
State.create(:id=>3258, :name=>"Niuas",:country_id=>224)
State.create(:id=>3259, :name=>"Yozgat",:country_id=>225)
State.create(:id=>3260, :name=>"Van",:country_id=>225)
State.create(:id=>3261, :name=>"Uşak",:country_id=>225)
State.create(:id=>3262, :name=>"Şanlıurfa",:country_id=>225)
State.create(:id=>3263, :name=>"Tunceli",:country_id=>225)
State.create(:id=>3264, :name=>"Sivas",:country_id=>225)
State.create(:id=>3265, :name=>"Siirt",:country_id=>225)
State.create(:id=>3266, :name=>"Niğde",:country_id=>225)
State.create(:id=>3267, :name=>"Nevşehir",:country_id=>225)
State.create(:id=>3268, :name=>"Muş",:country_id=>225)
State.create(:id=>3269, :name=>"Muğla",:country_id=>225)
State.create(:id=>3270, :name=>"Mardin",:country_id=>225)
State.create(:id=>3271, :name=>"Manisa",:country_id=>225)
State.create(:id=>3272, :name=>"Malatya",:country_id=>225)
State.create(:id=>3273, :name=>"Kütahya",:country_id=>225)
State.create(:id=>3274, :name=>"Konya",:country_id=>225)
State.create(:id=>3275, :name=>"Kırşehir",:country_id=>225)
State.create(:id=>3276, :name=>"Kayseri",:country_id=>225)
State.create(:id=>3277, :name=>"Kahramanmaraş",:country_id=>225)
State.create(:id=>3278, :name=>"İzmir",:country_id=>225)
State.create(:id=>3279, :name=>"Isparta",:country_id=>225)
State.create(:id=>3280, :name=>"Mersin",:country_id=>225)
State.create(:id=>3281, :name=>"Hatay",:country_id=>225)
State.create(:id=>3282, :name=>"Hakkâri",:country_id=>225)
State.create(:id=>3283, :name=>"Gaziantep",:country_id=>225)
State.create(:id=>3284, :name=>"Eskişehir",:country_id=>225)
State.create(:id=>3285, :name=>"Erzurum",:country_id=>225)
State.create(:id=>3286, :name=>"Erzincan",:country_id=>225)
State.create(:id=>3287, :name=>"Elazığ",:country_id=>225)
State.create(:id=>3288, :name=>"Diyarbakır",:country_id=>225)
State.create(:id=>3289, :name=>"Denizli",:country_id=>225)
State.create(:id=>3290, :name=>"Burdur",:country_id=>225)
State.create(:id=>3291, :name=>"Bitlis",:country_id=>225)
State.create(:id=>3292, :name=>"Bingöl",:country_id=>225)
State.create(:id=>3293, :name=>"Bilecik",:country_id=>225)
State.create(:id=>3294, :name=>"Balıkesir",:country_id=>225)
State.create(:id=>3295, :name=>"Aydın",:country_id=>225)
State.create(:id=>3296, :name=>"Antalya",:country_id=>225)
State.create(:id=>3297, :name=>"Ankara",:country_id=>225)
State.create(:id=>3298, :name=>"Ağrı",:country_id=>225)
State.create(:id=>3299, :name=>"Afyonkarahisar",:country_id=>225)
State.create(:id=>3300, :name=>"Adıyaman",:country_id=>225)
State.create(:id=>3301, :name=>"Adana",:country_id=>225)
State.create(:id=>3302, :name=>"Osmaniye",:country_id=>225)
State.create(:id=>3303, :name=>"Iğdır",:country_id=>225)
State.create(:id=>3304, :name=>"Aksaray",:country_id=>225)
State.create(:id=>3305, :name=>"Batman",:country_id=>225)
State.create(:id=>3306, :name=>"Karaman",:country_id=>225)
State.create(:id=>3307, :name=>"Kırıkkale",:country_id=>225)
State.create(:id=>3308, :name=>"Şırnak",:country_id=>225)
State.create(:id=>3309, :name=>"Kilis",:country_id=>225)
State.create(:id=>3310, :name=>"Zonguldak",:country_id=>225)
State.create(:id=>3311, :name=>"Trabzon",:country_id=>225)
State.create(:id=>3312, :name=>"Tokat",:country_id=>225)
State.create(:id=>3313, :name=>"Tekirdağ",:country_id=>225)
State.create(:id=>3314, :name=>"Sinop",:country_id=>225)
State.create(:id=>3315, :name=>"Samsun",:country_id=>225)
State.create(:id=>3316, :name=>"Sakarya",:country_id=>225)
State.create(:id=>3317, :name=>"Rize",:country_id=>225)
State.create(:id=>3318, :name=>"Ordu",:country_id=>225)
State.create(:id=>3319, :name=>"Kocaeli",:country_id=>225)
State.create(:id=>3320, :name=>"Kırklareli",:country_id=>225)
State.create(:id=>3321, :name=>"Kastamonu",:country_id=>225)
State.create(:id=>3322, :name=>"Kars",:country_id=>225)
State.create(:id=>3323, :name=>"Istanbul",:country_id=>225)
State.create(:id=>3324, :name=>"Gümüşhane",:country_id=>225)
State.create(:id=>3325, :name=>"Giresun",:country_id=>225)
State.create(:id=>3326, :name=>"Edirne",:country_id=>225)
State.create(:id=>3327, :name=>"Çorum",:country_id=>225)
State.create(:id=>3328, :name=>"Çankırı",:country_id=>225)
State.create(:id=>3329, :name=>"Çanakkale",:country_id=>225)
State.create(:id=>3330, :name=>"Bursa",:country_id=>225)
State.create(:id=>3331, :name=>"Bolu",:country_id=>225)
State.create(:id=>3332, :name=>"Artvin",:country_id=>225)
State.create(:id=>3333, :name=>"Amasya",:country_id=>225)
State.create(:id=>3334, :name=>"Bartın",:country_id=>225)
State.create(:id=>3335, :name=>"Karabük",:country_id=>225)
State.create(:id=>3336, :name=>"Yalova",:country_id=>225)
State.create(:id=>3337, :name=>"Ardahan",:country_id=>225)
State.create(:id=>3338, :name=>"Bayburt",:country_id=>225)
State.create(:id=>3339, :name=>"Düzce",:country_id=>225)
State.create(:id=>3340, :name=>"Tobago",:country_id=>226)
State.create(:id=>3341, :name=>"<NAME>",:country_id=>226)
State.create(:id=>3342, :name=>"Port-of-Spain",:country_id=>226)
State.create(:id=>3343, :name=>"Mayaro",:country_id=>226)
State.create(:id=>3344, :name=>"Arima",:country_id=>226)
State.create(:id=>3345, :name=>"Chaguanas",:country_id=>226)
State.create(:id=>3346, :name=>"Couva-Tabaquite-Talparo",:country_id=>226)
State.create(:id=>3347, :name=>"<NAME>",:country_id=>226)
State.create(:id=>3348, :name=>"Eastern Tobago",:country_id=>226)
State.create(:id=>3349, :name=>"Penal/Debe",:country_id=>226)
State.create(:id=>3350, :name=>"Princes Town",:country_id=>226)
State.create(:id=>3351, :name=>"Point Fortin",:country_id=>226)
State.create(:id=>3352, :name=>"<NAME>",:country_id=>226)
State.create(:id=>3353, :name=>"Siparia",:country_id=>226)
State.create(:id=>3354, :name=>"San Juan/Laventille",:country_id=>226)
State.create(:id=>3355, :name=>"Tunapuna/Piarco",:country_id=>226)
State.create(:id=>3356, :name=>"Nui",:country_id=>227)
State.create(:id=>3357, :name=>"Nanumea",:country_id=>227)
State.create(:id=>3358, :name=>"Funafuti",:country_id=>227)
State.create(:id=>3359, :name=>"Niutao",:country_id=>227)
State.create(:id=>3360, :name=>"Nanumanga",:country_id=>227)
State.create(:id=>3361, :name=>"Vaitupu",:country_id=>227)
State.create(:id=>3362, :name=>"Nukufetau",:country_id=>227)
State.create(:id=>3363, :name=>"Nukulaelae",:country_id=>227)
State.create(:id=>3364, :name=>"Fukien",:country_id=>228)
State.create(:id=>3365, :name=>"Kaohsiung",:country_id=>228)
State.create(:id=>3366, :name=>"Taipei",:country_id=>228)
State.create(:id=>3367, :name=>"Taiwan",:country_id=>228)
State.create(:id=>3368, :name=>"Kagera",:country_id=>229)
State.create(:id=>3369, :name=>"Zanzibar Urban/West",:country_id=>229)
State.create(:id=>3370, :name=>"Zanzibar North",:country_id=>229)
State.create(:id=>3371, :name=>"Zanzibar Central/South",:country_id=>229)
State.create(:id=>3372, :name=>"Tanga",:country_id=>229)
State.create(:id=>3373, :name=>"Tabora",:country_id=>229)
State.create(:id=>3374, :name=>"Singida",:country_id=>229)
State.create(:id=>3375, :name=>"Shinyanga",:country_id=>229)
State.create(:id=>3376, :name=>"Rukwa Region",:country_id=>229)
State.create(:id=>3377, :name=>"Pwani",:country_id=>229)
State.create(:id=>3378, :name=>"Pemba South",:country_id=>229)
State.create(:id=>3379, :name=>"Pemba North",:country_id=>229)
State.create(:id=>3380, :name=>"Mwanza",:country_id=>229)
State.create(:id=>3381, :name=>"Morogoro Region",:country_id=>229)
State.create(:id=>3382, :name=>"Mbeya",:country_id=>229)
State.create(:id=>3383, :name=>"Mara",:country_id=>229)
State.create(:id=>3384, :name=>"Lindi",:country_id=>229)
State.create(:id=>3385, :name=>"Kilimanjaro",:country_id=>229)
State.create(:id=>3386, :name=>"Kigoma",:country_id=>229)
State.create(:id=>3387, :name=>"Iringa",:country_id=>229)
State.create(:id=>3388, :name=>"Dodoma",:country_id=>229)
State.create(:id=>3389, :name=>"<NAME>",:country_id=>229)
State.create(:id=>3390, :name=>"Arusha",:country_id=>229)
State.create(:id=>3391, :name=>"Manyara",:country_id=>229)
State.create(:id=>3392, :name=>"Ruvuma",:country_id=>229)
State.create(:id=>3393, :name=>"Mtwara",:country_id=>229)
State.create(:id=>3394, :name=>"Zhytomyrs'ka",:country_id=>230)
State.create(:id=>3395, :name=>"Zaporiz'ka",:country_id=>230)
State.create(:id=>3396, :name=>"Zakarpats'ka",:country_id=>230)
State.create(:id=>3397, :name=>"Volyns'ka",:country_id=>230)
State.create(:id=>3398, :name=>"Vinnyts'ka",:country_id=>230)
State.create(:id=>3399, :name=>"Ternopil's'ka",:country_id=>230)
State.create(:id=>3400, :name=>"Sumy",:country_id=>230)
State.create(:id=>3401, :name=>"<NAME>",:country_id=>230)
State.create(:id=>3402, :name=>"Rivnens'ka",:country_id=>230)
State.create(:id=>3403, :name=>"Poltava",:country_id=>230)
State.create(:id=>3404, :name=>"Odessa",:country_id=>230)
State.create(:id=>3405, :name=>"Mykolayivs'ka",:country_id=>230)
State.create(:id=>3406, :name=>"L'vivs'ka",:country_id=>230)
State.create(:id=>3407, :name=>"Luhans'ka",:country_id=>230)
State.create(:id=>3408, :name=>"Kiev",:country_id=>230)
State.create(:id=>3409, :name=>"<NAME>",:country_id=>230)
State.create(:id=>3410, :name=>"<NAME>",:country_id=>230)
State.create(:id=>3411, :name=>"Kirovohrads'ka",:country_id=>230)
State.create(:id=>3412, :name=>"Khmel'nyts'ka",:country_id=>230)
State.create(:id=>3413, :name=>"Kherson",:country_id=>230)
State.create(:id=>3414, :name=>"Kharkivs'ka",:country_id=>230)
State.create(:id=>3415, :name=>"Ivano-Frankivs'ka",:country_id=>230)
State.create(:id=>3416, :name=>"Donets'ka",:country_id=>230)
State.create(:id=>3417, :name=>"Dnipropetrovska",:country_id=>230)
State.create(:id=>3418, :name=>"Chernivets'ka",:country_id=>230)
State.create(:id=>3419, :name=>"Chernihivs'ka",:country_id=>230)
State.create(:id=>3420, :name=>"Cherkas'ka",:country_id=>230)
State.create(:id=>3421, :name=>"Masaka",:country_id=>231)
State.create(:id=>3422, :name=>"Mpigi",:country_id=>231)
State.create(:id=>3423, :name=>"Namutumba",:country_id=>231)
State.create(:id=>3424, :name=>"Bukedea",:country_id=>231)
State.create(:id=>3425, :name=>"Apac",:country_id=>231)
State.create(:id=>3426, :name=>"Arua",:country_id=>231)
State.create(:id=>3427, :name=>"Bundibugyo",:country_id=>231)
State.create(:id=>3428, :name=>"Bushenyi",:country_id=>231)
State.create(:id=>3429, :name=>"Gulu",:country_id=>231)
State.create(:id=>3430, :name=>"Hoima",:country_id=>231)
State.create(:id=>3431, :name=>"Iganga",:country_id=>231)
State.create(:id=>3432, :name=>"Jinja",:country_id=>231)
State.create(:id=>3433, :name=>"Kabale",:country_id=>231)
State.create(:id=>3434, :name=>"Kabarole",:country_id=>231)
State.create(:id=>3435, :name=>"Kalangala",:country_id=>231)
State.create(:id=>3436, :name=>"Kampala",:country_id=>231)
State.create(:id=>3437, :name=>"Kamuli",:country_id=>231)
State.create(:id=>3438, :name=>"Kapchorwa",:country_id=>231)
State.create(:id=>3439, :name=>"Kasese",:country_id=>231)
State.create(:id=>3440, :name=>"Kibale",:country_id=>231)
State.create(:id=>3441, :name=>"Kiboga",:country_id=>231)
# Seed row for Kisoro district (Uganda); spacing normalized to match the
# file-wide `:name=>"...",:country_id=>` convention. Behavior is unchanged.
State.create(:id=>3442, :name=>"Kisoro",:country_id=>231)
State.create(:id=>3443, :name=>"Kitgum",:country_id=>231)
State.create(:id=>3444, :name=>"Kotido",:country_id=>231)
State.create(:id=>3445, :name=>"Kumi",:country_id=>231)
State.create(:id=>3446, :name=>"Lira",:country_id=>231)
State.create(:id=>3447, :name=>"Luwero",:country_id=>231)
State.create(:id=>3448, :name=>"Masindi",:country_id=>231)
State.create(:id=>3449, :name=>"Mbale",:country_id=>231)
State.create(:id=>3450, :name=>"Mbarara",:country_id=>231)
State.create(:id=>3451, :name=>"Moroto",:country_id=>231)
State.create(:id=>3452, :name=>"Moyo",:country_id=>231)
State.create(:id=>3453, :name=>"Mubende",:country_id=>231)
State.create(:id=>3454, :name=>"Mukono",:country_id=>231)
State.create(:id=>3455, :name=>"Nebbi",:country_id=>231)
State.create(:id=>3456, :name=>"Ntungamo",:country_id=>231)
State.create(:id=>3457, :name=>"Pallisa",:country_id=>231)
State.create(:id=>3458, :name=>"Rakai",:country_id=>231)
State.create(:id=>3459, :name=>"Rukungiri",:country_id=>231)
State.create(:id=>3460, :name=>"Soroti",:country_id=>231)
State.create(:id=>3461, :name=>"Tororo",:country_id=>231)
State.create(:id=>3462, :name=>"Adjumani",:country_id=>231)
State.create(:id=>3463, :name=>"Bugiri",:country_id=>231)
State.create(:id=>3464, :name=>"Busia",:country_id=>231)
State.create(:id=>3465, :name=>"Katakwi",:country_id=>231)
State.create(:id=>3466, :name=>"Nakasongola",:country_id=>231)
State.create(:id=>3467, :name=>"Sembabule",:country_id=>231)
State.create(:id=>3468, :name=>"Kaberamaido",:country_id=>231)
State.create(:id=>3469, :name=>"Kamwenge",:country_id=>231)
State.create(:id=>3470, :name=>"Kanungu",:country_id=>231)
State.create(:id=>3471, :name=>"Kayunga",:country_id=>231)
State.create(:id=>3472, :name=>"Kyenjojo",:country_id=>231)
State.create(:id=>3473, :name=>"Mayuge",:country_id=>231)
State.create(:id=>3474, :name=>"Nakapiripirit",:country_id=>231)
State.create(:id=>3475, :name=>"Pader",:country_id=>231)
State.create(:id=>3476, :name=>"Sironko",:country_id=>231)
State.create(:id=>3477, :name=>"Wa", :country_id=>231) # fix: key was mistyped :try_id, so this row was seeded with no country
State.create(:id=>3478, :name=>"Yumbe",:country_id=>231)
State.create(:id=>3479, :name=>"Abim",:country_id=>231)
State.create(:id=>3480, :name=>"Amolatar",:country_id=>231)
State.create(:id=>3481, :name=>"Amuria",:country_id=>231)
State.create(:id=>3482, :name=>"Amuru",:country_id=>231)
State.create(:id=>3483, :name=>"Budaka",:country_id=>231)
State.create(:id=>3484, :name=>"Bududa",:country_id=>231)
State.create(:id=>3485, :name=>"Bulisa",:country_id=>231)
State.create(:id=>3486, :name=>"Butaleja",:country_id=>231)
State.create(:id=>3487, :name=>"Dokolo",:country_id=>231)
State.create(:id=>3488, :name=>"Ibanda",:country_id=>231)
State.create(:id=>3489, :name=>"Isingiro",:country_id=>231)
State.create(:id=>3490, :name=>"Kaabong",:country_id=>231)
State.create(:id=>3491, :name=>"Kaliro",:country_id=>231)
State.create(:id=>3492, :name=>"Kiruhura",:country_id=>231)
State.create(:id=>3493, :name=>"Koboko",:country_id=>231)
State.create(:id=>3494, :name=>"Lyantonde",:country_id=>231)
State.create(:id=>3495, :name=>"Manafwa",:country_id=>231)
State.create(:id=>3496, :name=>"Maracha",:country_id=>231)
State.create(:id=>3497, :name=>"Mityana",:country_id=>231)
State.create(:id=>3498, :name=>"Nakaseke",:country_id=>231)
State.create(:id=>3499, :name=>"Oyam",:country_id=>231)
State.create(:id=>3500, :name=>"Bukwa",:country_id=>231)
State.create(:id=>3501, :name=>"Wake Island",:country_id=>232)
State.create(:id=>3502, :name=>"Navassa Island",:country_id=>232)
State.create(:id=>3503, :name=>"Baker Island",:country_id=>232)
State.create(:id=>3504, :name=>"Howland Island",:country_id=>232)
State.create(:id=>3505, :name=>"Jarvis Island",:country_id=>232)
State.create(:id=>3506, :name=>"<NAME>",:country_id=>232)
State.create(:id=>3507, :name=>"<NAME>",:country_id=>232)
State.create(:id=>3508, :name=>"Midway Islands",:country_id=>232)
State.create(:id=>3509, :name=>"<NAME>",:country_id=>232)
State.create(:id=>3510, :name=>"Arkansas",:country_id=>233)
State.create(:id=>3511, :name=>"<NAME>.",:country_id=>233)
State.create(:id=>3512, :name=>"Delaware",:country_id=>233)
State.create(:id=>3513, :name=>"Florida",:country_id=>233)
State.create(:id=>3514, :name=>"Georgia",:country_id=>233)
State.create(:id=>3515, :name=>"Kansas",:country_id=>233)
State.create(:id=>3516, :name=>"Louisiana",:country_id=>233)
State.create(:id=>3517, :name=>"Maryland",:country_id=>233)
State.create(:id=>3518, :name=>"Missouri",:country_id=>233)
State.create(:id=>3519, :name=>"Mississippi",:country_id=>233)
State.create(:id=>3520, :name=>"North Carolina",:country_id=>233)
State.create(:id=>3521, :name=>"Oklahoma",:country_id=>233)
State.create(:id=>3522, :name=>"South Carolina",:country_id=>233)
State.create(:id=>3523, :name=>"Tennessee",:country_id=>233)
State.create(:id=>3524, :name=>"Texas",:country_id=>233)
State.create(:id=>3525, :name=>"West Virginia",:country_id=>233)
State.create(:id=>3526, :name=>"Alabama",:country_id=>233)
State.create(:id=>3527, :name=>"Connecticut",:country_id=>233)
State.create(:id=>3528, :name=>"Iowa",:country_id=>233)
State.create(:id=>3529, :name=>"Illinois",:country_id=>233)
State.create(:id=>3530, :name=>"Indiana",:country_id=>233)
State.create(:id=>3531, :name=>"Maine",:country_id=>233)
State.create(:id=>3532, :name=>"Michigan",:country_id=>233)
State.create(:id=>3533, :name=>"Minnesota",:country_id=>233)
State.create(:id=>3534, :name=>"Nebraska",:country_id=>233)
State.create(:id=>3535, :name=>"New Hampshire",:country_id=>233)
State.create(:id=>3536, :name=>"New Jersey",:country_id=>233)
State.create(:id=>3537, :name=>"New York",:country_id=>233)
State.create(:id=>3538, :name=>"Ohio",:country_id=>233)
State.create(:id=>3539, :name=>"Rhode Island",:country_id=>233)
State.create(:id=>3540, :name=>"Vermont",:country_id=>233)
State.create(:id=>3541, :name=>"Wisconsin",:country_id=>233)
State.create(:id=>3542, :name=>"California",:country_id=>233)
State.create(:id=>3543, :name=>"Colorado",:country_id=>233)
State.create(:id=>3544, :name=>"New Mexico",:country_id=>233)
State.create(:id=>3545, :name=>"Nevada",:country_id=>233)
State.create(:id=>3546, :name=>"Utah",:country_id=>233)
State.create(:id=>3547, :name=>"Arizona",:country_id=>233)
State.create(:id=>3548, :name=>"Idaho",:country_id=>233)
State.create(:id=>3549, :name=>"Montana",:country_id=>233)
State.create(:id=>3550, :name=>"North Dakota",:country_id=>233)
State.create(:id=>3551, :name=>"Oregon",:country_id=>233)
State.create(:id=>3552, :name=>"South Dakota",:country_id=>233)
State.create(:id=>3553, :name=>"Washington",:country_id=>233)
State.create(:id=>3554, :name=>"Wyoming",:country_id=>233)
State.create(:id=>3555, :name=>"Hawaii",:country_id=>233)
State.create(:id=>3556, :name=>"Alaska",:country_id=>233)
State.create(:id=>3557, :name=>"Kentucky",:country_id=>233)
State.create(:id=>3558, :name=>"Massachusetts",:country_id=>233)
State.create(:id=>3559, :name=>"Pennsylvania",:country_id=>233)
State.create(:id=>3560, :name=>"Virginia",:country_id=>233)
State.create(:id=>3561, :name=>"Treinta y Tres",:country_id=>234)
State.create(:id=>3562, :name=>"Tacuarembó",:country_id=>234)
State.create(:id=>3563, :name=>"Soriano",:country_id=>234)
State.create(:id=>3564, :name=>"San José",:country_id=>234)
State.create(:id=>3565, :name=>"Salto",:country_id=>234)
State.create(:id=>3566, :name=>"Rocha",:country_id=>234)
State.create(:id=>3567, :name=>"Rivera",:country_id=>234)
State.create(:id=>3568, :name=>"<NAME>",:country_id=>234)
State.create(:id=>3569, :name=>"Paysandú",:country_id=>234)
State.create(:id=>3570, :name=>"Montevideo",:country_id=>234)
State.create(:id=>3571, :name=>"Maldonado",:country_id=>234)
State.create(:id=>3572, :name=>"Lavalleja",:country_id=>234)
State.create(:id=>3573, :name=>"Florida",:country_id=>234)
State.create(:id=>3574, :name=>"Flores",:country_id=>234)
State.create(:id=>3575, :name=>"Durazno",:country_id=>234)
State.create(:id=>3576, :name=>"Colonia",:country_id=>234)
State.create(:id=>3577, :name=>"<NAME>",:country_id=>234)
State.create(:id=>3578, :name=>"Canelones",:country_id=>234)
State.create(:id=>3579, :name=>"Artigas",:country_id=>234)
State.create(:id=>3580, :name=>"Karakalpakstan",:country_id=>235)
State.create(:id=>3581, :name=>"Surxondaryo",:country_id=>235)
State.create(:id=>3582, :name=>"Samarqand",:country_id=>235)
State.create(:id=>3583, :name=>"Qashqadaryo",:country_id=>235)
State.create(:id=>3584, :name=>"Buxoro",:country_id=>235)
State.create(:id=>3585, :name=>"Toshkent",:country_id=>235)
State.create(:id=>3586, :name=>"<NAME>",:country_id=>235)
State.create(:id=>3587, :name=>"Sirdaryo",:country_id=>235)
State.create(:id=>3588, :name=>"Navoiy",:country_id=>235)
State.create(:id=>3589, :name=>"Namangan",:country_id=>235)
State.create(:id=>3590, :name=>"Xorazm",:country_id=>235)
State.create(:id=>3591, :name=>"Jizzax",:country_id=>235)
State.create(:id=>3592, :name=>"<NAME>",:country_id=>235)
State.create(:id=>3593, :name=>"Andijon",:country_id=>235)
State.create(:id=>3594, :name=>"<NAME>",:country_id=>237)
State.create(:id=>3595, :name=>"<NAME>",:country_id=>237)
State.create(:id=>3596, :name=>"<NAME>",:country_id=>237)
State.create(:id=>3597, :name=>"<NAME>",:country_id=>237)
State.create(:id=>3598, :name=>"Grenadines",:country_id=>237)
State.create(:id=>3599, :name=>"Charlotte",:country_id=>237)
State.create(:id=>3600, :name=>"Zulia",:country_id=>238)
State.create(:id=>3601, :name=>"Yaracuy",:country_id=>238)
State.create(:id=>3602, :name=>"Trujillo",:country_id=>238)
State.create(:id=>3603, :name=>"Táchira",:country_id=>238)
State.create(:id=>3604, :name=>"Sucre",:country_id=>238)
State.create(:id=>3605, :name=>"Portuguesa",:country_id=>238)
State.create(:id=>3606, :name=>"<NAME>",:country_id=>238)
State.create(:id=>3607, :name=>"Monagas",:country_id=>238)
State.create(:id=>3608, :name=>"Miranda",:country_id=>238)
State.create(:id=>3609, :name=>"Mérida",:country_id=>238)
State.create(:id=>3610, :name=>"Lara",:country_id=>238)
State.create(:id=>3611, :name=>"Guárico",:country_id=>238)
State.create(:id=>3612, :name=>"Dependencias Federales",:country_id=>238)
State.create(:id=>3613, :name=>"Distrito Capital",:country_id=>238)
State.create(:id=>3614, :name=>"Falcón",:country_id=>238)
State.create(:id=>3615, :name=>"<NAME>",:country_id=>238)
State.create(:id=>3616, :name=>"Cojedes",:country_id=>238)
State.create(:id=>3617, :name=>"Carabobo",:country_id=>238)
State.create(:id=>3618, :name=>"Bolívar",:country_id=>238)
State.create(:id=>3619, :name=>"Barinas",:country_id=>238)
State.create(:id=>3620, :name=>"Aragua",:country_id=>238)
State.create(:id=>3621, :name=>"Apure",:country_id=>238)
State.create(:id=>3622, :name=>"Anzoátegui",:country_id=>238)
State.create(:id=>3623, :name=>"Amazonas",:country_id=>238)
State.create(:id=>3624, :name=>"Vargas",:country_id=>238)
State.create(:id=>3625, :name=>"Saint Croix Island",:country_id=>240)
State.create(:id=>3626, :name=>"Saint John Island",:country_id=>240)
State.create(:id=>3627, :name=>"Saint Thomas Island",:country_id=>240)
State.create(:id=>3628, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3629, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3630, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3631, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3632, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3633, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3634, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3635, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3636, :name=>"<NAME>ai",:country_id=>241)
State.create(:id=>3637, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3638, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3639, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3640, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3641, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3642, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3643, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3644, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3645, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3646, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3647, :name=>"<NAME>ình",:country_id=>241)
State.create(:id=>3648, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3649, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3650, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3651, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3652, :name=>"Lâm Ðồng",:country_id=>241)
State.create(:id=>3653, :name=>"Lai Châu",:country_id=>241)
State.create(:id=>3654, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3655, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3656, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3657, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3658, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3659, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3660, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3661, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3662, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3663, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3664, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3665, :name=>"Ðồng Tháp",:country_id=>241)
State.create(:id=>3666, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3667, :name=>"Ðắc Lắk",:country_id=>241)
State.create(:id=>3668, :name=>"Bà Rịa-Vũng Tàu",:country_id=>241)
State.create(:id=>3669, :name=>"Cao Bằng",:country_id=>241)
State.create(:id=>3670, :name=>"Bình Ðịnh",:country_id=>241)
State.create(:id=>3671, :name=>"Bến Tre",:country_id=>241)
State.create(:id=>3672, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3673, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3674, :name=>"<NAME> <NAME>",:country_id=>241)
State.create(:id=>3675, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3676, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3677, :name=>"Ðà Nẵng",:country_id=>241)
State.create(:id=>3678, :name=>"Bình Dương",:country_id=>241)
State.create(:id=>3679, :name=>"Bình Phước",:country_id=>241)
State.create(:id=>3680, :name=>"Thái Nguyên",:country_id=>241)
State.create(:id=>3681, :name=>"Quảng Nam",:country_id=>241)
State.create(:id=>3682, :name=>"Phú Thọ",:country_id=>241)
State.create(:id=>3683, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3684, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3685, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3686, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3687, :name=>"Cà Mau",:country_id=>241)
State.create(:id=>3688, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3689, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3690, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3691, :name=>"<NAME>",:country_id=>241)
State.create(:id=>3692, :name=>"Tafea",:country_id=>242)
State.create(:id=>3693, :name=>"Sanma",:country_id=>242)
State.create(:id=>3694, :name=>"Torba",:country_id=>242)
State.create(:id=>3695, :name=>"Malampa",:country_id=>242)
State.create(:id=>3696, :name=>"Penama",:country_id=>242)
State.create(:id=>3697, :name=>"Shefa",:country_id=>242)
State.create(:id=>3698, :name=>"Circonscription d'Uvéa",:country_id=>243)
State.create(:id=>3699, :name=>"Circonscription de Sigavé",:country_id=>243)
State.create(:id=>3700, :name=>"Circonscription d'Alo",:country_id=>243)
State.create(:id=>3701, :name=>"Vaisigano",:country_id=>244)
State.create(:id=>3702, :name=>"Va`a-o-Fonoti",:country_id=>244)
State.create(:id=>3703, :name=>"Tuamasaga",:country_id=>244)
State.create(:id=>3704, :name=>"Satupa`itea",:country_id=>244)
State.create(:id=>3705, :name=>"Palauli",:country_id=>244)
State.create(:id=>3706, :name=>"Gagaifomauga",:country_id=>244)
State.create(:id=>3707, :name=>"Gaga`emauga",:country_id=>244)
State.create(:id=>3708, :name=>"Fa`asaleleaga",:country_id=>244)
State.create(:id=>3709, :name=>"Atua",:country_id=>244)
State.create(:id=>3710, :name=>"Aiga-i-le-Tai",:country_id=>244)
State.create(:id=>3711, :name=>"A`ana",:country_id=>244)
State.create(:id=>3712, :name=>"<NAME>",:country_id=>123)
State.create(:id=>3713, :name=>"<NAME>",:country_id=>123)
State.create(:id=>3714, :name=>"Komuna e Ferizajt",:country_id=>123)
State.create(:id=>3715, :name=>"Komuna e Mitrovicës",:country_id=>123)
State.create(:id=>3716, :name=>"Komuna e Thërandës",:country_id=>123)
State.create(:id=>3717, :name=>"Komuna e Skenderajt",:country_id=>123)
State.create(:id=>3718, :name=>"Komuna e Prizrenit",:country_id=>123)
State.create(:id=>3719, :name=>"Komuna e Prishtinës",:country_id=>123)
State.create(:id=>3720, :name=>"Opština Podujevo",:country_id=>123)
State.create(:id=>3721, :name=>"Komuna e Pejës",:country_id=>123)
State.create(:id=>3722, :name=>"Komuna e Rahovecit",:country_id=>123)
State.create(:id=>3723, :name=>"Komuna e Lipjanit",:country_id=>123)
State.create(:id=>3724, :name=>"Komuna e Leposaviqit",:country_id=>123)
State.create(:id=>3725, :name=>"Opština Kamenica",:country_id=>123)
State.create(:id=>3726, :name=>"Opština Klina",:country_id=>123)
State.create(:id=>3727, :name=>"Komuna e Kaçanikut",:country_id=>123)
State.create(:id=>3728, :name=>"Komuna e Istogut",:country_id=>123)
State.create(:id=>3729, :name=>"Komuna e Gjilanit",:country_id=>123)
State.create(:id=>3730, :name=>"Komuna e Drenasit",:country_id=>123)
State.create(:id=>3731, :name=>"Komuna e Dragashit",:country_id=>123)
State.create(:id=>3732, :name=>"Komuna e Deçanit",:country_id=>123)
State.create(:id=>3733, :name=>"Komuna e Gjakovës",:country_id=>123)
State.create(:id=>3734, :name=>"<NAME>",:country_id=>123)
State.create(:id=>3735, :name=>"Opština Štrpce",:country_id=>123)
State.create(:id=>3736, :name=>"Komuna e Shtimes",:country_id=>123)
State.create(:id=>3737, :name=>"Opština Nov<NAME>",:country_id=>123)
State.create(:id=>3738, :name=>"Komuna e Obiliqit",:country_id=>123)
State.create(:id=>3739, :name=>"Komuna e Malisheves",:country_id=>123)
State.create(:id=>3740, :name=>"Komuna e Zubin Potokut",:country_id=>123)
State.create(:id=>3741, :name=>"Opština Zvečan",:country_id=>123)
State.create(:id=>3742, :name=>"Muḩāfaz̧at Ta‘izz",:country_id=>245)
State.create(:id=>3743, :name=>"Muḩāfaz̧at Shabwah",:country_id=>245)
State.create(:id=>3744, :name=>"Sanaa",:country_id=>245)
State.create(:id=>3745, :name=>"Muḩāfaz̧at Şa‘dah",:country_id=>245)
State.create(:id=>3746, :name=>"Muḩāfaz̧at Raymah",:country_id=>245)
State.create(:id=>3747, :name=>"Muḩāfaz̧at Ma’rib",:country_id=>245)
State.create(:id=>3748, :name=>"Al Maḩwīt",:country_id=>245)
State.create(:id=>3749, :name=>"Muḩāfaz̧at al Jawf",:country_id=>245)
State.create(:id=>3750, :name=>"Ḩaḑramawt",:country_id=>245)
State.create(:id=>3751, :name=>"Muḩāfaz̧at Dhamār",:country_id=>245)
State.create(:id=>3752, :name=>"Al Mahrah",:country_id=>245)
State.create(:id=>3753, :name=>"Al Ḩudaydah",:country_id=>245)
State.create(:id=>3754, :name=>"Al Bayḑāʼ",:country_id=>245)
State.create(:id=>3755, :name=>"Aden",:country_id=>245)
State.create(:id=>3756, :name=>"Abyan",:country_id=>245)
State.create(:id=>3757, :name=>"Muḩāfaz̧at aḑ Ḑāli‘",:country_id=>245)
State.create(:id=>3758, :name=>"Omran",:country_id=>245)
State.create(:id=>3759, :name=>"Muḩāfaz̧at Ḩajjah",:country_id=>245)
State.create(:id=>3760, :name=>"Muḩāfaz̧at Ibb",:country_id=>245)
State.create(:id=>3761, :name=>"Muḩāfaz̧at Laḩij",:country_id=>245)
State.create(:id=>3762, :name=>"<NAME>",:country_id=>245)
State.create(:id=>3763, :name=>"Acoua",:country_id=>246)
State.create(:id=>3764, :name=>"Bandraboua",:country_id=>246)
State.create(:id=>3765, :name=>"Bandrele",:country_id=>246)
State.create(:id=>3766, :name=>"Bouéni",:country_id=>246)
State.create(:id=>3767, :name=>"Chiconi",:country_id=>246)
State.create(:id=>3768, :name=>"Chirongui",:country_id=>246)
State.create(:id=>3769, :name=>"Dembeni",:country_id=>246)
State.create(:id=>3770, :name=>"Dzaoudzi",:country_id=>246)
State.create(:id=>3771, :name=>"Kani-Kéli",:country_id=>246)
State.create(:id=>3772, :name=>"Koungou",:country_id=>246)
State.create(:id=>3773, :name=>"Mamoudzou",:country_id=>246)
State.create(:id=>3774, :name=>"Mtsamboro",:country_id=>246)
State.create(:id=>3775, :name=>"M'Tsangamouji",:country_id=>246)
State.create(:id=>3776, :name=>"Ouangani",:country_id=>246)
State.create(:id=>3777, :name=>"Pamandzi",:country_id=>246)
State.create(:id=>3778, :name=>"Sada",:country_id=>246)
State.create(:id=>3779, :name=>"Tsingoni",:country_id=>246)
State.create(:id=>3780, :name=>"Free State",:country_id=>247)
State.create(:id=>3781, :name=>"KwaZulu-Natal",:country_id=>247)
State.create(:id=>3782, :name=>"Eastern Cape",:country_id=>247)
State.create(:id=>3783, :name=>"Gauteng",:country_id=>247)
State.create(:id=>3784, :name=>"Mpumalanga",:country_id=>247)
State.create(:id=>3785, :name=>"Northern Cape",:country_id=>247)
State.create(:id=>3786, :name=>"Limpopo",:country_id=>247)
State.create(:id=>3787, :name=>"North-West",:country_id=>247)
State.create(:id=>3788, :name=>"Western Cape",:country_id=>247)
State.create(:id=>3789, :name=>"Western",:country_id=>248)
State.create(:id=>3790, :name=>"Southern",:country_id=>248)
State.create(:id=>3791, :name=>"North-Western",:country_id=>248)
State.create(:id=>3792, :name=>"Northern",:country_id=>248)
State.create(:id=>3793, :name=>"Lusaka",:country_id=>248)
State.create(:id=>3794, :name=>"Luapula",:country_id=>248)
State.create(:id=>3795, :name=>"Eastern",:country_id=>248)
State.create(:id=>3796, :name=>"Copperbelt",:country_id=>248)
State.create(:id=>3797, :name=>"Central",:country_id=>248)
State.create(:id=>3798, :name=>"Midlands",:country_id=>249)
State.create(:id=>3799, :name=>"Matabeleland South",:country_id=>249)
State.create(:id=>3800, :name=>"Matabeleland North",:country_id=>249)
State.create(:id=>3801, :name=>"Masvingo",:country_id=>249)
State.create(:id=>3802, :name=>"Mashonaland West Province",:country_id=>249)
State.create(:id=>3803, :name=>"Mashonaland East",:country_id=>249)
State.create(:id=>3804, :name=>"Mashonaland Central",:country_id=>249)
State.create(:id=>3805, :name=>"Manicaland",:country_id=>249)
State.create(:id=>3806, :name=>"Bulawayo",:country_id=>249)
State.create(:id=>3807, :name=>"Harare Province",:country_id=>249)
# Admin user create
City.create(:name => "Haldi", :state_id=>1);
City.create(:name=> "kolkata", :state_id => 1)
user = User.create(:first_name => "admin", :email => "<EMAIL>", :password=> "<PASSWORD>")
user.add_role :admin<file_sep>/app/services/form_element.rb
# View-layer builders for Bootstrap-styled form controls. Each helper
# wraps the corresponding Rails form-builder call so views stay terse
# and styling stays consistent.
module FormElement
  # Labelled text input; extra CSS classes may be passed via *args.
  def rservice_text_field field_value, label_value, form, *args
    (label_tag label_value)+
    (form.text_field field_value.to_sym, class: "form-control #{args.join(' ')}" )
  end

  # Labelled text area; extra CSS classes may be passed via *args.
  # Fix: *args was accepted but silently ignored — now honoured, matching
  # rservice_text_field. Callers passing no extra classes are unaffected.
  def rservice_text_area field_value, label_value, form, *args
    (label_tag label_value)+
    (form.text_area field_value.to_sym, class: "form-control #{args.join(' ')}" )
  end

  # Hidden input named after +field_value+ carrying +value+.
  def rservice_hidden_field field_value, value, form, *args
    form.hidden_field field_value.to_sym, value: value
  end

  # Submit button; extra button classes (e.g. "btn-primary") via *args.
  def rservice_submit_button value, form, *args
    form.submit value, class: "btn #{args.join(' ')}"
  end

  # Bold label followed by its value.
  def reservice_value_printer label ,value, *args
    content_tag(:b, label) +
    (value)
  end
  # Alias following the module's `rservice_` naming convention; the
  # misspelled original name above is kept so existing callers work.
  alias_method :rservice_value_printer, :reservice_value_printer
end
# A user's feedback on a completed service request, attributed to the
# partner portfolio that performed the work.
class Feedback < ApplicationRecord
  belongs_to :portfolio
  belongs_to :user
  belongs_to :service_request
  # Both the free-text description and the numeric rating are mandatory.
  validates :description, :rating, presence: true
end
<file_sep>/app/helpers/employee/service_requests_helper.rb
# Helpers for the employee-facing service-request list.
module Employee::ServiceRequestsHelper
  # Action cell for one request row:
  # * in-progress -> Complete / On hold / Comment buttons (remote PUT/GET)
  # * completed   -> the customer's rating block, or "Not Yet Rated"
  # * any other   -> a placeholder dash
  # (Dead commented-out "On hold -> Comment" branch removed.)
  def employee_action request
    if request.status == Status.inprogress.last
      (link_to 'Complete', accept_reject_employee_service_requests_path(value: "completed", id: request.id), method: :put, :remote => true, class: " btn btn-success btn-xs btn_margin", data: { confirm: 'Are you sure?' } ) +
      (link_to 'On hold', accept_reject_employee_service_requests_path(value: "onhold", id: request.id), method: :put, :remote => true, class: " btn btn-danger btn-xs", data: { confirm: 'Are you sure?' }) +
      (link_to 'Comment', open_comment_pop_up_path(id: request.id), method: :get, :remote => true, id: '#comment', class: " btn btn-info btn-xs btn_margin")
    elsif (request.status == Status.completed.last )
      request.feedback.present? ? add_rating_html_block(request) : "Not Yet Rated"
    else
      "-"
    end
  end
end
class State < ApplicationRecord
  # States of a given country, alphabetised. When no country id is
  # supplied the body returns nil and the scope falls through to `all`.
  scope :details, lambda { |country_id|
    where(country_id: country_id).order(name: :asc) if country_id.present?
  }
end
<file_sep>/app/models/service.rb
# A service offered on the platform. Services form a two-level tree:
# top-level services (parent_id nil) and their sub-services.
class Service < ApplicationRecord
  mount_uploader :photo, ImageUploader

  has_many :sub_services, class_name: "Service", foreign_key: "parent_id", dependent: :destroy
  belongs_to :service, class_name: "Service", foreign_key: "parent_id",optional: true
  has_many :service_requests
  has_many :portfolios, through: :portfolio_services
  # NOTE(review): `source: :service` makes :time_slots resolve to the
  # requests' *service*, not a time slot — confirm this is intended.
  has_many :time_slots, through: :service_requests, source: :service
  has_many :portfolio_services
  has_many :cities ,through: :portfolio_services

  validates :name, presence: {message: 'Service name is required!'}
  validates :name, length: { in: 3..110, message: 'There must be 3 to 110 characters only!' }
  # City selection is only required for top-level services.
  # (Fixed user-facing typo "At leaset" -> "At least".)
  validates :city_ids, presence: {message: 'At least 1 city should be selected!'}, if: :is_parent_id_nil?

  # Scope methods
  scope :get_services, -> { where("parent_id IS NULL") }
  scope :get_sub_services, -> { where("parent_id IS NOT NULL") }

  # True for top-level services (no parent).
  def is_parent_id_nil?
    parent_id.nil?
  end

  # Paginated top-level services, ordered by id.
  def self.get_all_services page, per_page=5
    where(:parent_id => nil).order(:id).paginate(:page => page, :per_page => per_page)
  end

  # Paginated sub-services of +parent_id+.
  def self.get_all_sub_services parent_id, page , per_page=5
    where(:parent_id => parent_id).paginate(:page => page, :per_page => per_page)
  end

  # Cities this service is offered in, alphabetised and de-duplicated.
  def service_city
    cities.order('name ASC').uniq
  end
end
<file_sep>/app/controllers/admin/users_controller.rb
# Admin dashboard: user/partner listings, their service requests,
# portfolio moderation, and service-request status changes.
# NOTE: load_and_authorize_resource (CanCanCan) loads and authorises
# @user for every action — presumably why manage_portfolio_status can
# read @user without appearing in get_user_detail's `only:` list; verify.
class Admin::UsersController < AdminController
  load_and_authorize_resource
  before_action :users_details, only: [:show]
  before_action :status_value, only: [:update_service_status]
  before_action :get_user_detail, only: [:user_service_request,:partner_service_request, :destroy, :partner_details]
  before_action :params_page, only:[:user_service_request, :partner_service_request, :service_requests_list]
  layout 'admin'

  # Paginated list of plain users.
  # NOTE(review): params_page does not run for this action, so @page is
  # nil here — pagination likely always shows page 1. Confirm intended.
  def index
    @users = User.get_users(:user, @page)
  end

  def new
  end

  def show ; end

  # Destroys a user (or partner) and returns to the matching listing.
  def destroy
    if @user.destroy
      redirect_to @user.partner? ? admin_partners_path : admin_users_path, notice: 'User is successfully destroyed.'
    end
  end

  # Method for partner listing
  # NOTE(review): @page is nil here as well (see index).
  def partners
    @users = User.get_users(:partner, @page)
  end

  # Method to display the service request list of user
  def user_service_request
    @service_requests = @user.get_user_service_requests(@page)
  end

  # Method to display the partner's service request list of user
  def partner_service_request
    @service_requests = @user.get_partner_service_requests(@page)
  end

  # Method to display the partner's service request list of user
  def service_requests_list
    @service_requests = ServiceRequest.get_all_service_requests(@page)
  end

  # Method to find the details of all registerd partner bassed on ID
  def partner_details
    @portfolio = @user.portfolio
  end

  # Approves/blocks a partner's portfolio via params[:value].
  # NOTE(review): update_attribute skips validations — confirm intended.
  def manage_portfolio_status
    @users = User.get_users(:partner)
    @portfolio = @user.portfolio
    @portfolio.update_attribute(:status, params[:value])
  end

  #update status of users service request (set by status_value filter),
  # then email the requesting user about the accept/reject decision.
  def update_service_status
    if(@service_request and @status)
      @service_request.update_attributes(status_id: @status.id)
      UserMailer.accepted_rejected(@service_request.user, @service_request).deliver_now
      flash[:success] = "Service request accepted "
      redirect_to admin_partners_path
    else
      flash[:error] = "Service request not found!"
      redirect_to admin_partners_path
    end
  end

  private

  # Resolves the Status row and target request for update_service_status.
  def status_value
    @status = Status.get_status_value(params[:status])
    @service_request = ServiceRequest.find(params[:request_id])
  end

  def get_user_detail
    @user = User.find(params[:id])
  end

  def users_details
    @user = User.get_user_details(params[:id])
  end

  # Pagination page number shared by the listing actions.
  def params_page
    @page = params[:page]
  end
end
<file_sep>/db/migrate/20180904085537_add_documents_to_portfolios.rb
# Adds a JSON `documents` column to portfolios.
class AddDocumentsToPortfolios < ActiveRecord::Migration[5.2]
  def change
    add_column :portfolios, :documents, :json
  end
end
<file_sep>/app/helpers/application_helper.rb
# Helpers shared across all views.
module ApplicationHelper
  include FormElement

  # Human-readable label for the given user's primary role.
  # (Nested ternary flattened into an if/elsif chain.)
  def current_user_role user
    if user.has_role? :admin
      "Admin"
    elsif user.has_role? :user
      "User"
    else
      "Partner"
    end
  end

  # [name, id] pairs for a service <select>.
  def service_select
    Service.all.collect {|u| [u.name, u.id]}
  end

  # --- Devise helpers so auth forms render outside Devise controllers ---
  def resource_name
    :user
  end

  def resource
    @resource ||= User.new
  end

  def devise_mapping
    @devise_mapping ||= Devise.mappings[:user]
  end

  # Static Google Maps image URL centred on +center+ ("lat,lng").
  def google_map(center)
    "https://maps.googleapis.com/maps/api/staticmap?center=#{center}&size=300x300&zoom=17"
  end

  # Renders the star-rating partial for a request's feedback.
  def add_rating_html_block request
    render partial: "shared/show_rating" , locals: {request: request, score: request.feedback.rating }
  end

  # Role-appropriate dashboard link.
  # Fix: the +user+ argument was accepted but ignored in favour of
  # current_user; it is now honoured. Callers passing current_user see
  # identical behaviour.
  def dashboard_link user
    if user.admin?
      link_to "Admin dashboard", admin_services_path
    elsif user.partner?
      (link_to "Company dashboard", partner_dashboard_index_path)
    elsif user.user?
      link_to "Dashboard", dashboard_path
    end
  end

  # Standard "Back" button linking to +url+.
  def back_button_link url
    link_to t('.Back', :default => t("helpers.links.back")),url,:class => 'btn btn-primary'
  end
end
<file_sep>/app/controllers/home_controller.rb
class HomeController < ApplicationController
  before_action :authenticate_user!, except: [:index, :get_services_by_city]

  # Landing page: first page (12 items) of top-level services.
  def index
    @services = Service.get_all_services(1,12)
  end

  # Signed-in user's dashboard: cities for the city selector.
  def dashboard
    @cities = City.ordered
  end

  # AJAX: top-level services for the chosen city, falling back to the
  # default landing-page list when no city was picked.
  def get_services_by_city
    @services =
      if params[:city_id].present?
        City.find(params[:city_id]).services.where(parent_id: nil)
      else
        Service.get_all_services(1,12)
      end
  end
end
<file_sep>/db/migrate/20180829055635_create_portfolios.rb
# Creates the portfolios table: a partner's profile (service offered,
# city, bio, experience, education, and an avatar attachment).
class CreatePortfolios < ActiveRecord::Migration[5.2]
  def change
    create_table :portfolios do |t|
      t.integer :service_id
      t.integer :user_id
      t.integer :city_id
      t.string :gender
      t.string :about
      t.float :experience
      t.string :education
      # Paperclip attachment macro: expands to avatar_file_name,
      # avatar_content_type, avatar_file_size, avatar_updated_at.
      t.attachment :avatar
      t.timestamps
    end
  end
end
<file_sep>/app/controllers/service_requests_controller.rb
# User-facing lifecycle of service requests: list, place, edit, cancel,
# plus the AJAX lookups that drive the booking form's cascading selects
# (city -> service -> sub-service -> partner -> time slot).
class ServiceRequestsController < ApplicationController
  before_action :authenticate_user!
  before_action :get_service_request, only: [:edit,:show, :update,:cancel_service_request]

  # Current user's requests with eager-loaded associations, 5 per page.
  def index
    @service_requests = current_user.service_requests.includes(:service,:address,:status, :portfolio, :time_slot).ordered.paginate(:page => params[:page], :per_page => 5)
  end

  # Places a new request and emails both the user and the partner.
  def create
    @service_request = current_user.service_requests.create(service_request_params)
    if @service_request.persisted?
      UserMailer.service_request_generate(current_user,@service_request, 'user').deliver_now
      UserMailer.service_request_generate(current_user,@service_request, 'partner').deliver_now
      flash[:success] = "Service Request Placed Successfully!"
      redirect_to '/service_requests'
    else
      flash[:error] = @service_request.errors.full_messages.join(",")
      redirect_to '/dashboard'
    end
  end

  # AJAX: data for the comment modal (request id + canned comments).
  def open_comment_pop_up
    @request_id = params[:id]
    @comments = ServiceRequest.comments_list
  end

  def edit
    @cities = City.details
    @addresses = current_user.get_all_address_from_service_city(@service_request)
  end

  def show; end

  # Updates a request, then redirects to the dashboard matching the
  # caller's role. When the comment modal submitted an empty free-text
  # comment, the selected canned comment is used instead.
  def update
    # need to look better solution
    if ( (params[:comment_popup] == "true") && ( params[:service_request][:comment] == '') )
      params[:service_request][:comment] = params[:service_request][:select_comment]
    end
    service_request = @service_request.update_attributes(service_request_update_params)
    if service_request
      if current_user.partner?
        redirect_to '/partner/dashboard'
      elsif current_user.employee?
        redirect_to '/employee'
      else
        redirect_to '/service_requests'
      end
    end
  end

  # Flips the request's status to the "Cancelled" row (looked up by name).
  def cancel_service_request
    status = Status.where(name: 'Cancelled').last
    @service_request.update_attributes(status_id: status.id)
    redirect_to '/service_requests'
  end

  # AJAX dispatcher for the booking form's cascading selects; the branch
  # taken depends on which param the client sent.
  def get_services
    if params[:city_selection].present?
      # City chosen: the user's addresses there plus the city's services.
      @city = City.find(params[:city_selection])
      @addresses = @city.get_curent_user_address(current_user)
      @services = @city.get_services
    elsif params[:service_selection].present?
      # Service chosen: its sub-services.
      service = Service.find(params[:service_selection])
      @city = City.find(params[:city_id])
      @sub_services = service.sub_services
    elsif params[:service_id]
      # Sub-service chosen: active partner portfolios for service + city.
      @portfolios = PortfolioService.city_services(params[:service_id], params[:city_id]).map{|m| m.portfolio if m.portfolio.active?}.compact
      @city = City.find(params[:city_id])
      @service_id = params[:service_id]
    elsif params[:date].present?
      # Date chosen: free time slots of the selected portfolio.
      portfolio = Portfolio.find(params[:portfolio_id])
      @time_slots = portfolio.available_time_slots(params[:date])
    end
  end

  private

  def get_service_request
    @service_request = ServiceRequest.find(params[:id])
  end

  # Strong params for create; service_id arrives outside the nested hash.
  def service_request_params
    params.require(:service_request).permit(:address_id, :user_id,:status_id, :portfolio_id,:service_request_number,:time_slot_id,:service_date).merge(service_id: params[:service_id])
  end

  # Strong params for update.
  def service_request_update_params
    params.require(:service_request).permit(:address_id,:status_id, :portfolio_id,:time_slot_id, :service_id, :comment)
  end
end
<file_sep>/app/helpers/home_helper.rb
# View helpers for the home/dashboard pages.
module HomeHelper
  # City dropdown; a change fires a remote GET to `get_services_path`
  # via the Rails UJS data attributes.
  def select_city cities
    select_tag :city_selection,
               options_from_collection_for_select(cities, :id, :name),
               :prompt => "Select City",
               class: 'form-control',
               data: {remote: true,url: get_services_path,method: 'get'}
  end

  # Count of the user's service requests or addresses, chosen by +type+.
  def user_request_count user, type
    if type == 'service_requests'
      user.service_requests.count
    else
      user.addresses.count
    end
  end
end
<file_sep>/app/models/address.rb
# A user's saved address. Geocoding (geocoder gem helpers) fills the
# street/city fields from lat/lng on create; manually entered addresses
# (no latitude) are validated field-by-field instead.
class Address < ApplicationRecord
has_many :service_requests, dependent: :destroy
belongs_to :user
belongs_to :city, optional: true
# Validate form on server
# These validations run only when latitude is absent (see latitude_exists?),
# i.e. for manually entered addresses rather than map-picked ones.
validates :flat_number, presence: {message: 'Flat no is required!'}, if: :latitude_exists?
validates :flat_number, length: { in: 1..50, message: 'Flat no must have 1 to 50 characters!' }, if: :latitude_exists?
validates :street_name, length: { in: 5..150, message: 'Street name must have 5 to 150 characters!' }, if: :latitude_exists?
validates :pin_code, presence: {message: 'Pin code is required!'}, if: :latitude_exists?
validates :pin_code, length: { in: 5..10, message: 'Pin code must have 5 to 10 characters!' }, if: :latitude_exists?
validates :landmark, length: { in: 5..150, message: 'Landmark must have 5 to 150 characters!' }, if: :latitude_exists?
validates :city_id, presence: {message: 'At leaset 1 city should be selected!'}, if: :latitude_exists?
geocoded_by :address # address is an attribute of MyModel
after_validation :geocode
before_validation :reverse_geocode, on: :create
# On create, resolve lat/lng to a street address. Only states already in
# the DB are accepted; the city row is created on demand under that state.
reverse_geocoded_by :latitude, :longitude do |address,results|
if geo = results.first
state = State.where(:name => geo.state).first
if state && geo.city
city = City.where(:name => geo.city).first
unless city
city = City.create(:name => geo.city, :state_id => state.id)
end
geo_address = geo.data["address"]
# Geocoder responses vary; fall through several candidate keys per field.
address.flat_number = geo_address["building"] || geo_address["house_number"] || geo_address["residential"] || geo_address["hotel"]
address.street_name = geo_address["locality"] || geo_address["university"] || geo_address["college"] || geo_address["village"]
address.landmark = geo_address["road"] || geo_address["suburb"]
address.city = city
address.pin_code = geo_address["postcode"]
else
address.errors.add(:base, "We are not providing service on this state & city")
end
end
end
# True when any service request references this address.
def has_service_requests?
service_requests.present?
end
# Display string: the geocoded address when available, otherwise the
# manually entered parts.
def complete_address
if google_address.present?
google_address
else
"#{flat_number} #{street_name} #{landmark}, #{city.try(:name)}, #{pin_code}"
end
end
# NOTE(review): despite the name, this returns true when latitude is
# ABSENT (nil) — it gates the manual-entry validations above. A rename
# such as `latitude_missing?` would be clearer; confirm no external callers.
def latitude_exists?
latitude.nil?
end
end
<file_sep>/app/controllers/partner/dashboard_controller.rb
# Partner-facing dashboard: lists the portfolio's service requests and
# lets the partner accept/reject/complete them and assign employees.
class Partner::DashboardController < PartnerController
  before_action :get_service_requests, only: [:index, :accept_reject]
  before_action :get_employees, only: [:manage_employees]

  def index; end

  # PUT accept_reject — moves the request into the status named by
  # params[:value] (must be whitelisted in Status::ACTION) and notifies
  # the requesting user by email.
  def accept_reject
    service_request = ServiceRequest.find(params[:id])
    return unless service_request && Status::ACTION.include?(params[:value])
    service_request.update_attributes(:status_id => Status.send(params[:value]).first.id)
    UserMailer.accepted_rejected(current_user, service_request).deliver_now
  end

  def manage_employees; end

  # Accepted requests plus the employees not yet assigned to one.
  def manage_service_requests
    portfolio_requests = current_user.portfolio.service_requests
    @service_requests = portfolio_requests.includes(:status).accepted_request.order("id DESC")
    @employees = current_user.employees - portfolio_requests.available_employees
  end

  # Assigns an employee to a request and flips it to Inprogress; exposes
  # @errors for the JS response when no request was chosen.
  def assign_service_request_to_user
    request_id = params[:service_request][:request_id]
    if request_id.present?
      ServiceRequest.find(request_id).update(status: Status.inprogress.last, assignee_id: params[:service_request][:assignee_id])
      @service_requests = current_user.portfolio.service_requests.includes(:status).accepted_request.order("id DESC")
    else
      @errors = "Assign request failed !"
    end
  end

  private

  def get_service_requests
    @service_requests = current_user.portfolio.service_requests.includes(:status).order("id DESC")
  end

  def get_employees
    @employees = current_user.employees
  end
end
<file_sep>/db/migrate/20180918071705_add_comment_to_service_request.rb
# Adds a free-text `comment` column to service_requests so partners and
# users can attach a note when updating a request's status.
class AddCommentToServiceRequest < ActiveRecord::Migration[5.2]
def change
add_column :service_requests, :comment, :text
end
end
<file_sep>/app/controllers/partner/company_services_controller.rb
# CRUD for the services a partner's portfolio offers, priced per city.
class Partner::CompanyServicesController < PartnerController
  before_action :find_portfolio_service, only: [:edit, :update, :destroy]

  # GET — the portfolio's services grouped by city for display.
  def index
    @portfolio_services = current_user.portfolio.portfolio_services.order(:city_id).group_by(&:city_id)
  end

  def new
    @sub_services = Service.get_sub_services
    @portfolio_service = PortfolioService.new
  end

  # Remote action: cities where the chosen sub-service is available,
  # minus cities this portfolio already covers for that sub-service.
  def get_cities
    sub_service_id = params[:portfolio_service][:service_id]
    if sub_service_id.present?
      @cities = Service.find(sub_service_id).service.service_city - current_user.portfolio.remove_existing_service_city(sub_service_id)
    end
  end

  # Creates one PortfolioService per selected city.
  def create
    city_ids = params[:portfolio_service][:city_ids]
    if city_ids.present?
      city_ids.each do |city_id|
        # Return value intentionally unused (was assigned to a dead local).
        PortfolioService.create(portfolio_service_params(city_id, current_user.portfolio.id))
      end
      flash[:success] = "Service created successfully!"
      redirect_to partner_company_services_path
    else
      flash.now[:error] = "Please select a city and then proceed!"
      render :new
    end
  end

  def edit
  end

  # BUG FIX: the original branched on `@portfolio_service` (always truthy)
  # instead of the result of update_attributes, so a failed validation
  # still flashed success. Branch on the update's return value instead.
  def update
    if @portfolio_service.update_attributes(portfolio_service_update_params)
      flash[:success] = "Price Updated Successfully!"
      redirect_to partner_company_services_path
    else
      flash[:error] = "Error"
      render :edit
    end
  end

  def destroy
    @portfolio_service.destroy
    flash[:success] = "Service deleted successfully!"
    redirect_to partner_company_services_path
  end

  private

  # Strong params for create, pinned to the given city and portfolio.
  def portfolio_service_params city_id, portfolio_id
    params.require(:portfolio_service).permit(:service_id, :price).merge(city_id: city_id, portfolio_id: portfolio_id)
  end

  # Only the price may be changed after creation.
  def portfolio_service_update_params
    params.require(:portfolio_service).permit(:price)
  end

  def find_portfolio_service
    @portfolio_service = PortfolioService.find(params[:id])
  end
end
<file_sep>/app/helpers/admin/users_helper.rb
# View helpers for the admin users screens.
module Admin::UsersHelper
# Comma-separated city names of all the user's addresses.
def users_city user
user.addresses.map{|m| m.city.try(:name)}.join(', ')
end
def company_address user
user.portfolio.address
end
def company_name user
user.portfolio.company_name
end
# Action cell for a service-request row:
#   Pending   -> Accept / Reject links (with confirm dialog)
#   Completed -> the user's rating partial, or "Not Yet Rated"
#   otherwise -> nil (blank cell)
def admin_status_action request
if (request.status.try(:name) == "Pending")
(link_to "Accept",update_service_status_admin_user_path(request_id: request.id,status: "Accepted"),:data => { :confirm => t('.confirm', :default => t("helpers.links.confirm", :default => 'Are you sure?')) },:class => 'btn btn-success btn-xs btn_margin', style: "color:#fff" ) +
(link_to "Reject",update_service_status_admin_user_path(request_id: request.id ,status: "Rejected"),:data => { :confirm => t('.confirm', :default => t("helpers.links.confirm", :default => 'Are you sure?')) },:class => 'btn btn-danger btn-xs', style: "color:#fff")
elsif (request.status.try(:name) == "Completed")
request.feedback.present? ? (render partial: "shared/show_rating" , locals: {request: request, score: request.feedback.rating }) : "Not Yet Rated"
end
end
end
<file_sep>/app/models/time_slot.rb
# A bookable one-hour window; start_time/end_time are "HH:MM" strings
# (see db/seeds.rb).
class TimeSlot < ApplicationRecord
  has_many :service_requests
  has_many :services, through: :service_requests

  # Oldest-first ordering.
  scope :ordered, -> { order('created_at') }

  # Display label, e.g. "09:00-10:00".
  def start_time_with_end_time
    "#{start_time}-#{end_time}"
  end
end
<file_sep>/app/controllers/users/registrations_controller.rb
# Devise registrations with role handling: the sign-up form carries a
# virtual `role` param; partners additionally get an empty portfolio.
# `create` responds via JS (@url / @errors) rather than redirecting.
class Users::RegistrationsController < Devise::RegistrationsController
skip_before_action :authenticate_user!, only: [:new, :create], raise: false
prepend_before_action :require_no_authentication, only: [:new, :create, :cancel]
prepend_before_action :authenticate_scope!, only: [:edit, :update, :destroy]
prepend_before_action :set_minimum_password_length, only: [:new, :edit]
# GET /resource/sign_up
def new
build_resource
yield resource if block_given?
respond_with resource
end
# Creates the user, signs them in, assigns the requested role and exposes
# the role-specific landing URL (or the validation errors) to the view.
def create
build_resource(sign_up_params)
resource.save
yield resource if block_given?
if resource.persisted?
sign_up(resource_name, resource)
resource.add_role sign_up_params[:role].to_sym
if(sign_up_params[:role] == "partner")
resource.create_portfolio
end
@url = after_registration_path(resource)
else
@errors = resource.errors.full_messages.join(", ")
end
end
protected
# Landing page per role: user -> home, partner -> portfolio edit,
# anything else -> admin dashboard.
def after_registration_path resource
if (resource.has_role? :user)
root_url
elsif resource.has_role? :partner
edit_partner_portfolio_url
else
admin_root_url
end
end
def configure_permitted_parameters
devise_parameter_sanitizer.permit(:sign_up, keys: [:email, :password,:password_confirmation,:role, :first_name, :last_name,:phone])
end
def sign_up_params
params.require(:user).permit(:email, :password,:password_confirmation,:first_name,:role, :last_name,:phone)
end
end
<file_sep>/app/helpers/services_helper.rb
# Select-tag builders for the service-request creation flow. Each dropdown
# fires a remote GET (Rails UJS) to `get_services_path` to populate the
# next dropdown in the chain: city -> service -> sub-service -> company
# -> time slot.
module ServicesHelper
  # [name, id] pairs for a city select.
  def city_select
    City.all.collect {|u| [u.name, u.id]}
  end

  # FIX: removed a stray `label_tag 'services'` whose return value was
  # discarded (only the last expression of a helper is rendered), so it
  # never reached the page. Also normalized the data key `URL:` to `url:`
  # for consistency with HomeHelper#select_city — HTML attribute names are
  # ASCII case-insensitive, so rendered behavior is unchanged.
  def select_services services, city
    select_tag :service_selection, options_from_collection_for_select(services, :id, :name), :prompt => "Select Services", class: 'form-control', data: {remote: true, url: get_services_path + "?city_id=#{city.id}", method: 'get' }
  end

  def select_sub_services sub_services, city
    select_tag :service_id, options_from_collection_for_select(sub_services, :id, :name), class: 'form-control', :prompt => "Select Sub Services", data: {remote: true, url: get_services_path + "?city_id=#{city.id}", method: 'get' }
  end

  def select_portfolio portfolios, city
    select_tag 'service_request[portfolio_id]', options_from_collection_for_select(portfolios, :id, :company_name), :prompt => "Select Company", class: 'form-control', data: {remote: true, url: get_services_path + "?city_id=#{city.id}", method: 'get' }
  end

  def select_time_slot time_slots
    select_tag 'service_request[time_slot_id]', options_from_collection_for_select(time_slots, :id, :start_time_with_end_time), :prompt => "Select Timeslot", class: 'form-control'
  end
end
<file_sep>/db/migrate/20180918070634_add_compnay_name_to_portfolio.rb
# Adds company name and phone number columns to portfolios.
# NOTE: the class-name typo ("Compnay") must stay — it matches the
# migration filename and is already recorded in schema_migrations.
class AddCompnayNameToPortfolio < ActiveRecord::Migration[5.2]
def change
add_column :portfolios, :company_name, :string
add_column :portfolios, :company_ph_no, :string
end
end
<file_sep>/app/models/portfolio.rb
# A partner's company profile: services offered per city
# (portfolio_services), incoming service requests, feedback, and
# carrierwave-uploaded images/documents plus a paperclip avatar.
class Portfolio < ApplicationRecord
mount_uploaders :images, ImageUploader
mount_uploaders :documents, ImageUploader
belongs_to :user
has_many :portfolio_services
has_many :services, through: :portfolio_services
has_many :cities, through: :portfolio_services
has_many :service_requests
has_many :feedbacks
# validates :service, presence: true, on: :update
# Profile fields become mandatory once the partner edits the portfolio.
validates :about, :experience, :company_ph_no, :address, :company_name, presence: true, on: :update
has_attached_file :avatar, styles: { medium: "300x300>", thumb: "100x100>" }, default_url: "missing.jpeg"
validates_attachment_content_type :avatar, content_type: /\Aimage\/.*\z/
# Label used in selects; currently just the owning user's email.
def portfolio_with_user_name_and_about
"#{user.try(:email)}"
end
def user_email
user.email
end
# A nil status counts as inactive.
def active?
status == true
end
def portfolio_status
status ? "Active" : "In Active"
end
# Label for the toggle button (the opposite of the current state).
def reverse_portfolio_status
status ? "Deactivate" : "Activate"
end
# Time slots still free on +date+ (all slots minus those already booked);
# when +date+ is today, slots already past are filtered out as well.
def available_time_slots date
time_slots = service_requests.present? ? TimeSlot.ordered - service_requests.where(service_date: date).map{|m| m.time_slot}.compact : TimeSlot.ordered
(date.to_date.strftime("%d/%m/%Y") == Time.now.strftime("%d/%m/%Y")) ? select_time_slots(time_slots) : time_slots
end
# Same as available_time_slots but keeps the request's own slot
# selectable while the user edits an existing booking.
def available_time_slots_for_update service_request
time_slots = service_requests.present? ? TimeSlot.ordered - service_requests.where(service_date: service_request.service_date).map{|m| m.time_slot}.compact : TimeSlot.ordered
all_slots = (service_request.service_date.strftime("%d/%m/%Y") == Time.now.strftime("%d/%m/%Y")) ? select_time_slots(time_slots) : time_slots
return all_slots << service_request.time_slot
end
# Drops slots that start at or before the current hour. Relies on
# lexicographic comparison of zero-padded "HH:MM" strings.
def select_time_slots time_slots
current_time = Time.now
excluded_time_slot = current_time.strftime("%H") + ":00"
time_slots.select{|time| excluded_time_slot < time.start_time}
end
def get_all_services page, per_page=5
portfolio_services.order(:id).paginate(:page => page, :per_page => per_page)
end
# Price of the given service as a string ("" when not offered).
def fetch_service_price service_id
portfolio_services.where(service_id: service_id).last.try(:price).to_s
end
# Cities where this portfolio already offers +service_id+.
def remove_existing_service_city(service_id)
portfolio_services.where(service_id: service_id).map{|ps| ps.city}
end
end<file_sep>/db/migrate/20180918100705_add_assignee_id_to_service_request.rb
# Adds assignee_id so a service request can be assigned to an employee
# (see User#assigned_service_requests).
class AddAssigneeIdToServiceRequest < ActiveRecord::Migration[5.2]
def change
add_column :service_requests, :assignee_id, :integer
end
end
<file_sep>/app/helpers/partner/dashboard_helper.rb
# View helpers for the partner dashboard tables.
module Partner::DashboardHelper
# Action cell for a service-request row, keyed on its current status:
#   Pending              -> Accept / Reject / Comment
#   Accepted             -> Complete / In Complete
#   Completed            -> the customer's rating (or "Not Yet Rated")
#   InCompleted / Onhold -> Comment
#   anything else        -> "-"
def partner_action request
if ["Pending"].include?(request.status.try(:name))
(link_to 'Accept', accept_reject_partner_dashboard_index_path(value: "accepted", id: request.id), method: :put, :remote => true, class: " btn btn-success btn-xs btn_margin", data: { confirm: 'Are you sure?' }) +
(link_to 'Reject', accept_reject_partner_dashboard_index_path(value: "rejected", id:request.id ), method: :put, :remote => true, class: " btn btn-danger btn-xs", data: { confirm: 'Are you sure?' } ) +
(link_to 'Comment', open_comment_pop_up_path(id: request.id), method: :get, :remote => true, id: '#comment', class: " btn btn-primary btn-xs btn_margin")
elsif request.status.try(:name) == "Accepted"
(link_to 'Complete', accept_reject_partner_dashboard_index_path(value: "completed", id: request.id), method: :put, :remote => true, class: " btn btn-success btn-xs btn_margin", data: { confirm: 'Are you sure?' } ) +
(link_to 'In Complete', accept_reject_partner_dashboard_index_path(value: "incompleted", id: request.id), method: :put, :remote => true, class: " btn btn-danger btn-xs", data: { confirm: 'Are you sure?' } )
elsif (request.status.try(:name) == "Completed")
request.feedback.present? ? add_rating_html_block(request) : "Not Yet Rated"
elsif (request.status.try(:name) == "InCompleted") || (request.status.try(:name) == "Onhold")
(link_to 'Comment', open_comment_pop_up_path(id: request.id), method: :get, :remote => true, id: '#comment', class: " btn btn-primary btn-xs btn_margin")
else
"-"
end
end
# Edit icon linking to the employee's edit page.
def employee_actions employee
link_to edit_partner_user_path(employee),class: 'btn btn-default btn-xs' do
content_tag(:span, '',class: 'glyphicon glyphicon-edit')
end
end
end
<file_sep>/config/routes.rb
Rails.application.routes.draw do
# NOTE(review): `namespace :partner` is declared twice in this file; Rails
# merges them, but consolidating this route into the later block would be
# clearer. It also overlaps `resources :company_services` declared below.
namespace :partner do
get 'company_services/index'
end
devise_for :users
# get 'home/index'
# For details on the DSL available within this file, see http://guides.rubyonrails.org/routing.html
root to: "home#index"
resources :services do
member do
get 'sub_services'
get 'create_sub_services'
end
end
resources :addresses
resources :feedbacks
resources :service_requests do
member do
patch 'cancel_service_request'
end
end
#custom routes for user login and logout
devise_scope :user do
post 'login', to: 'users/sessions#create'
delete 'logout', to: 'users/sessions#destroy'
get 'sign_up', to: 'users/registrations#new'
post 'create_user', to: 'users/registrations#create'
post 'send_password_instructions', to: 'users/passwords#create'
put 'change_password', to: 'users/passwords#update'
get 'partner/invite_employee', to: 'users/invitations#new'
post 'send_invitation', to: 'users/invitations#create'
put 'update_employee_detail', to: 'users/invitations#update'
get 'accept_invitation', to: 'users/invitations#edit'
end
#custom routes for admin
namespace :admin do
root to: "dashboard#index"
resources :services do
member do
get 'sub_services'
get 'create_sub_services'
get 'edit_sub_services'
put 'update_sub_services'
end
end
resources :users do
member do
get 'user_service_request'
get 'update_service_status'
put 'manage_portfolio_status'
end
end
end
# Partner area: portfolio management, dashboard, employees, services.
namespace :partner do
resource :portfolio do
collection do
get 'get_subservices'
get 'get_city_service_list'
post 'upload_photos'
end
member do
delete 'delete_photo'
end
end
resources :dashboard, only: [:index] do
collection do
put "accept_reject"
get "manage_employees"
get "manage_service_requests"
end
end
resources :users
resources :company_services
end
# Employee area: assigned service requests only.
namespace :employee do
resource :employees
root to: "service_requests#index"
resources :service_requests, only: [:index] do
collection do
put "accept_reject"
end
end
end
#Singular routes for few methods
post 'create_comment', to: 'service_requests#update'
get 'open_comment_pop_up', to: 'service_requests#open_comment_pop_up'
get 'get_services_by_city', to: 'home#get_services_by_city'
get 'dashboard', to: 'home#dashboard'
get 'get_services', to: 'service_requests#get_services'
get 'get_states_and_cities', to: 'addresses#get_states_and_cities'
get 'admin/partners', :to => 'admin/users#partners'
get 'admin/partners/:id/partner_service_request', :to => 'admin/users#partner_service_request', :as => 'partner_service_request'
get 'admin/partner/:id', :to => 'admin/users#partner_details', :as => 'partner_details'
put 'partner/assign_service_request', :to => 'partner/dashboard#assign_service_request_to_user', :as => 'assign_service_requests_to_employee'
get 'admin/service-requests', :to => 'admin/users#service_requests_list', :as => 'service_requests_list'
get 'get_cities', to: 'partner/company_services#get_cities'
end
<file_sep>/README.md
### README
* Description
The business of this application is to provide services in a particular location.
For example, an admin can create services in this portal, and companies can register here and provide those
services in every location defined by the admin.
Once a company is registered on the portal and starts providing a service, end users can create service
requests as per their needs.
* Ruby version (2.5.1)
* Steps to set up application
* Clone the Application
* Run the bundle command
* rails db:create
* rails db:migrate
* rails db:seed
* rails s
* Default login credentials
Currently we have three roles in our application Admin, Partner(Company), End User
Admin Credentials: email: <EMAIL> , password: <PASSWORD>
Partner Credentials: email: <EMAIL> , password: <PASSWORD>
End User Credentials: email: <EMAIL> , password: <PASSWORD>
* Note: Please change the data in seed file as per your need
<file_sep>/db/seeds.rb
# This file should contain all the record creation needed to seed the database with its default values.
# Seeds: one country, two states, four cities; one user per role; the
# service-request status list; and 24 back-to-back hourly time slots.
p "Importing countries data to table."
Country.create(code: 'USA', name: 'United States Of America')
p "Importing States data to table."
State.create(name: 'New York', country_id: 1)
State.create(name: 'New Jersey', country_id: 1)
p "...................Importing city in database.................."
City.create(name: 'New York City', state_id: 1)
City.create(name: 'Buffalo', state_id: 1)
City.create(name: 'Atlantic City', state_id: 2)
City.create(name: 'Jersey City', state_id: 2)
# Admin user
user = User.create(:email => "<EMAIL>", :password => "<PASSWORD>", :first_name => "admin", :phone => "+1623454569")
user.add_role :admin
# Partner user (gets an empty portfolio to fill in later)
user = User.create(:email => "<EMAIL>", :password => "<PASSWORD>", :first_name => "Partner", :phone => "+17340474594")
user.add_role :partner
user.create_portfolio
# Normal end user
user = User.create(:email => "<EMAIL>", :password => "<PASSWORD>", :first_name => "user", :phone => "+1895640345")
user.add_role :user
# Service-request lifecycle statuses with fixed ids (code looks these up
# by name, so keep the list and order stable).
["Pending", "Accepted", "Rejected", "Inprogress",
 "Cancelled", "Completed", "InCompleted", "Onhold"].each_with_index do |name, index|
  Status.create(id: index + 1, name: name)
end
# 24 one-hour slots: "00:00"-"01:00" through "23:00"-"24:00".
TimeSlot.delete_all
(0..23).each do |hour|
  TimeSlot.create(start_time: format("%02d:00", hour), end_time: format("%02d:00", hour + 1))
end
<file_sep>/app/controllers/users/passwords_controller.rb
# # frozen_string_literal: true
# Devise password-reset controller. Both the "instructions sent" and
# "unknown email" outcomes redirect to the root path with a flash instead
# of re-rendering the Devise forms.
class Users::PasswordsController < Devise::PasswordsController
prepend_before_action :require_no_authentication
# Render the #edit only if coming from a reset password email link
append_before_action :assert_reset_token_passed, only: :edit
def new
self.resource = resource_class.new
end
# Sends reset instructions; flashes success or "not registered" and
# always redirects home.
def create
self.resource = resource_class.send_reset_password_instructions(resource_params)
yield resource if block_given?
if successfully_sent?(resource)
flash[:success] = "A reset link has been sent to your registered email "
else
flash[:email] = "The email you entered is not registered with us"
end
redirect_to after_sending_reset_password_instructions_path_for
end
# Builds an empty resource carrying the token from the email link so the
# form can submit it back.
def edit
self.resource = resource_class.new
set_minimum_password_length
resource.reset_password_token = params[:reset_password_token]
end
# Resets the password by token; on success optionally signs the user in
# (per Devise.sign_in_after_reset_password), on failure flashes a generic
# error and returns home.
def update
self.resource = resource_class.reset_password_by_token(resource_params)
yield resource if block_given?
if resource.errors.empty?
if Devise.sign_in_after_reset_password
flash_message = resource.active_for_authentication? ? :updated : :updated_not_active
set_flash_message!(:notice, flash_message)
sign_in(resource_name, resource)
else
set_flash_message!(:notice, :updated_not_active)
end
redirect_to root_path
else
set_minimum_password_length
flash[:error] = "Error"
redirect_to '/'
end
end
protected
def after_update_path_for(resource)
root_path
end
def after_resetting_password_path_for(resource)
root_path
end
# The path used after sending reset password instructions
def after_sending_reset_password_instructions_path_for
root_path
end
end
<file_sep>/app/models/city.rb
# City lookup table; reaches portfolios and services through the
# portfolio_services join model.
class City < ApplicationRecord
  has_many :addresses
  has_many :portfolio_services
  has_many :portfolios, through: :portfolio_services, foreign_key: "city_id"
  has_many :services, through: :portfolio_services

  # Cities of a given state, Z-A; yields nil (whole relation) when no
  # state_id is supplied — a proc (not lambda) so the arity stays lax.
  scope :details, proc { |state_id| where(:state_id => state_id).order(name: :desc) if state_id.present? }
  scope :ordered, -> { order('name') }

  # The given user's addresses registered in this city.
  def get_curent_user_address user
    addresses.where(user_id: user.id)
  end

  # Services available in this city: portfolio_services rows with no
  # portfolio attached appear to act as the admin's city/service
  # catalogue (confirm against Partner::CompanyServicesController).
  def get_services
    portfolio_services.where(portfolio_id: nil).map(&:service)
  end
end
<file_sep>/test/system/sub_services_test.rb
# Scaffold-generated system tests covering SubService CRUD through the UI.
require "application_system_test_case"
class SubServicesTest < ApplicationSystemTestCase
setup do
@sub_service = sub_services(:one)
end
test "visiting the index" do
visit sub_services_url
assert_selector "h1", text: "Sub Services"
end
test "creating a Sub service" do
visit sub_services_url
click_on "New Sub Service"
fill_in "Name", with: @sub_service.name
fill_in "Service", with: @sub_service.service_id
click_on "Create Sub service"
assert_text "Sub service was successfully created"
click_on "Back"
end
test "updating a Sub service" do
visit sub_services_url
click_on "Edit", match: :first
fill_in "Name", with: @sub_service.name
fill_in "Service", with: @sub_service.service_id
click_on "Update Sub service"
assert_text "Sub service was successfully updated"
click_on "Back"
end
test "destroying a Sub service" do
visit sub_services_url
page.accept_confirm do
click_on "Destroy", match: :first
end
assert_text "Sub service was successfully destroyed"
end
end
<file_sep>/app/mailers/user_mailer.rb
# Mails around the service-request lifecycle.
class UserMailer < ApplicationMailer
  # Confirmation mail on request creation. +role+ selects the audience:
  # 'user' mails the requester, anything else mails the portfolio owner
  # with the partner template.
  def service_request_generate user, service_request, role
    @name = service_request.user.full_name
    @service_request_number = service_request.service_request_number
    for_user   = (role == 'user')
    recipient  = for_user ? user.email : service_request.portfolio.user_email
    template   = for_user ? 'service_request_generate' : 'partner_service_request_generate'
    mail(to: recipient, subject: "Service Request Generated", template_name: template)
  end

  # Notifies the requester after the partner accepts/rejects the request.
  def accepted_rejected user, service_request
    @name = service_request.user.full_name
    @service_request_number = service_request.service_request_number
    @status = service_request.status.name
    mail(to: service_request.user.email, subject: "Service Request", template_name: 'accepted_rejected')
  end
end
<file_sep>/app/assets/javascripts/application.js
// This is a manifest file that'll be compiled into application.js, which will include all the files
// listed below.
//
// Any JavaScript/Coffee file within this directory, lib/assets/javascripts, or any plugin's
// vendor/assets/javascripts directory can be referenced here using a relative path.
//
// It's not advisable to add code directly here, but if you do, it'll appear at the bottom of the
// compiled file. JavaScript code in this file should be added after the last require_* statement.
//
// Read Sprockets README (https://github.com/rails/sprockets#sprockets-directives) for details
// about supported directives.
//
//= require jquery
//= require rails-ujs
//= require moment
//= require twitter/bootstrap
//= require activestorage
//= require turbolinks
//= require_tree .
// Show the global loading spinner whenever any <select> changes (each
// dropdown in the request-creation flow triggers a remote call).
$(function(){
$('select').change(function () {
$('.loading').show();
})
})
// Keep the side-menu highlight in sync with the current URL; re-runs on
// every Turbolinks visit as well as the initial page load.
$(document).on('ready turbolinks:load', function() {
$('.side-menu a').filter(function(){
return this.href==location.href}).addClass('active').siblings('a').removeClass('active')
$('.side-menu a').click(function(){
$(this).addClass('active').siblings().removeClass('active')
})
})
// function add_date_picker_function(){
// $('#datetimepicker1').datetimepicker({format: 'YYYY/MM/DD',minDate: '0'}).on('dp.change',function(e){
// $.get( '#{get_services_path}', { city_id: $('#city_selection').val(),
// portfolio_id: $('#service_request_portfolio_id').val(),date: $('#date_value').val()})
// });
// }<file_sep>/app/models/portfolio_service.rb
# Join model: the price a portfolio charges for a service in a given city.
# portfolio is optional — rows without one are used as the city/service
# catalogue (see City#get_services).
class PortfolioService < ApplicationRecord
  belongs_to :service
  belongs_to :portfolio, optional: true
  belongs_to :city

  # All offerings of +service_id+ within +city_id+.
  scope :city_services, lambda { |service_id, city_id|
    where(service_id: service_id, city_id: city_id)
  }
end
<file_sep>/db/migrate/20180910125230_add_price_to_services.rb
# Adds a decimal price column to services (precision 15, scale 10).
class AddPriceToServices < ActiveRecord::Migration[5.2]
def change
add_column :services, :price, :decimal, precision: 15, scale: 10
end
end
<file_sep>/db/city.rb
# Seed script: Indian cities keyed by their state_id. Rewritten from 130
# repeated City.create lines into a data-driven loop; creation order
# (state by state, city by city) is preserved, since Ruby hashes iterate
# in insertion order. NOTE: "<NAME>" below is a redaction placeholder
# carried over from the original data.
cities_by_state = {
  1260 => ["Siliguri", "Asansol", "Kharagpur", "Baharampur", "Hugli-Chinsurah",
           "Raiganj", "Jalpaiguri", "Habra", "Ranaghat", "Darjiling",
           "Purulia", "Murshidabad", "Adra", "Kolkata"],
  1261 => ["Lucknow", "Kanpur", "Firozabad", "Agra", "Meerut", "Varanasi",
           "Allahabad", "Moradabad", "Aligarh", "Saharanpur", "Noida",
           "Jhansi", "Shahjahanpur", "Modinagar", "Hapur", "<NAME>",
           "Vrindavan", "Mathura", "Rudrapur"],
  1263 => ["Chennai", "Coimbatore", "Madurai", "Tiruchirappalli",
           "Tirunelveli", "Ranipet", "Vellore", "Rameshwaram", "Viswanatham"],
  1265 => ["Jaipur", "Jodhpur", "Bikaner", "Udaipur", "Ajmer", "Bhilwara",
           "Alwar", "Ratangarh", "Mount Abu"],
  1266 => ["Ludhiana", "Patiala", "Amritsar", "Jalandhar", "Bathinda",
           "Pathankot", "Mohali", "Firozpur", "Zirakpur", "Fazilka"],
  1267 => ["Pondicherry", "Karaikal"],
  1268 => ["Bhubaneswar", "Cuttack", "Raurkela", "Puri"],
  1273 => ["Mumbai", "Pune", "Nagpur", "Nashik", "Lonavla", "Shirdi"],
  1274 => ["Indore", "Bhopal", "Jabalpur", "Gwalior", "Ujjain"],
  1276 => ["Kochi", "Thiruvananthapuram", "Kozhikode", "Kollam", "Palakkad"],
  1277 => ["Bengaluru", "Mangaluru", "Kolar", "Udupi", "Vijayapura"],
  1278 => ["Srinagar", "Jammu", "Rajauri", "Udhampur"],
  1279 => ["Shimla", "Mandi", "Solan", "Palampur"],
  1280 => ["Faridabad", "Gurgaon", "Panchkula", "Pinjore"],
  1281 => ["Ahmedabad", "Surat", "Vadodara", "Godhra"],
  1283 => ["Panaji", "Goa"],
  1287 => ["Patna", "Motihari", "Bettiah", "Muzaffarpur", "Raxaul", "Bhagalpur"],
  1288 => ["Guwahati", "Silchar", "Dibrugarh", "Tezpur"],
  1289 => ["Naharlagun", "Pasighat"],
  1290 => ["Visakhapatnam", "Vijayawada", "Tirupati", "Rajampet", "Hyderabad"],
  1293 => ["Dhanbad", "Ranchi", "Jamshedpur", "Bokaro Steel City"],
  1294 => ["Hardwar", "Rishikesh", "Nainital", "Dehradun", "Mussoorie"]
}
cities_by_state.each do |state_id, names|
  names.each { |name| City.create(:name => name, :state_id => state_id) }
end
<file_sep>/test/controllers/sub_services_controller_test.rb
require 'test_helper'
# Integration tests for SubServicesController covering the full scaffolded
# CRUD cycle (index / new / create / show / edit / update / destroy).
# Relies on the :one fixture in test/fixtures/sub_services.yml.
class SubServicesControllerTest < ActionDispatch::IntegrationTest
setup do
# Fixture record shared by every test below.
@sub_service = sub_services(:one)
end
test "should get index" do
get sub_services_url
assert_response :success
end
test "should get new" do
get new_sub_service_url
assert_response :success
end
test "should create sub_service" do
# Creating from fixture attributes should add exactly one record.
assert_difference('SubService.count') do
post sub_services_url, params: { sub_service: { name: @sub_service.name, service_id: @sub_service.service_id } }
end
assert_redirected_to sub_service_url(SubService.last)
end
test "should show sub_service" do
get sub_service_url(@sub_service)
assert_response :success
end
test "should get edit" do
get edit_sub_service_url(@sub_service)
assert_response :success
end
test "should update sub_service" do
patch sub_service_url(@sub_service), params: { sub_service: { name: @sub_service.name, service_id: @sub_service.service_id } }
assert_redirected_to sub_service_url(@sub_service)
end
test "should destroy sub_service" do
# Destroying should remove exactly one record.
assert_difference('SubService.count', -1) do
delete sub_service_url(@sub_service)
end
assert_redirected_to sub_services_url
end
end
<file_sep>/app/models/user.rb
# Application user account.
#
# Combines Devise authentication and Rolify role management with the
# associations a user needs both as a customer (service requests, addresses,
# feedbacks) and as a partner (portfolio, employees, assigned requests).
class User < ApplicationRecord
  rolify

  # Include default devise modules. Others available are:
  # :confirmable, :lockable, :timeoutable, :trackable and :omniauthable
  devise :database_authenticatable, :registerable, :recoverable,
         :rememberable, :validatable, :invitable

  # Transient role name used during sign-up / invitation flows.
  attr_accessor :role

  # validates :first_name, :email, :phone, presence: true
  # validates :password, :presence => true, :confirmation => true, :length => {:within => 6..128}

  has_one :portfolio
  has_many :service_requests
  has_many :addresses
  has_many :feedbacks
  has_many :employees, class_name: "User", foreign_key: "invited_by_id"
  has_many :assigned_service_requests, class_name: "ServiceRequest", foreign_key: "assignee_id"

  # ---- Role predicates (delegate to Rolify) ----

  def admin?
    has_role?(:admin)
  end

  def partner?
    has_role?(:partner)
  end

  def user?
    has_role?(:user)
  end

  def employee?
    has_role?(:employee)
  end

  # All of this user's addresses located in the same city as the
  # given service request's address.
  def get_all_address_from_service_city service_request
    addresses.where(city_id: service_request.address.city_id)
  end

  # "First Last" in title case; blank name parts are dropped.
  def full_name
    parts = [first_name, last_name].reject(&:blank?)
    parts.join(' ').titleize
  end

  # Phone number for display; "-" when none is recorded.
  def phone_no
    return phone if phone.present?
    "-"
  end

  # Paginated users holding the given role, with addresses preloaded.
  def self.get_users role, page=1
    includes(:addresses).with_role(role).paginate(page: page, per_page: 5)
  end

  # Single user by id with addresses preloaded.
  def self.get_user_details id
    includes(:addresses).find(id)
  end

  # This user's own service requests, newest-first per `ordered`, paginated.
  def get_user_service_requests page
    service_requests.includes(:service, :address, :status, :portfolio, :time_slot).ordered.paginate(page: page, per_page: 5)
  end

  # Service requests received by this user's portfolio (partner side).
  def get_partner_service_requests page
    portfolio.service_requests.includes(:service, :address, :status, :portfolio, :time_slot).ordered.paginate(page: page, per_page: 5)
  end

  # Status of the attached portfolio (raises if the user has none).
  def portfolio_status
    portfolio.portfolio_status
  end
end
| 8560b0318ec22074f559fe5c04b489f39fb269f3 | [
"JavaScript",
"Ruby",
"Markdown"
] | 67 | Ruby | Santoshgautam/RServices_RoR_BS | 792bc411e6b7444b0a23099c7fae94ec4dc616a5 | ab53fef9e5fc0d8f81a1c6b3dd7ce228154da857 |
// Validate the registration form: the two password fields must match and the
// password must be at least 8 characters long.
// Returns false (blocking submission) on any failure, true when valid.
function valider() {
    var password = document.getElementById("password").value;
    var confirmPassword = document.getElementById("confirmpassword").value;
    var longueur = password.length;

    if (password != confirmPassword) {
        alert("Les mots de passe ne correspondent pas");
        return false;
    } else if (longueur < 8) {
        alert("Veuillez entrer un mot de passe plus long (8 caractères minimum)");
        return false;
    } else {
        alert("Formulaire complet.");
        // Bug fix: the original fell through and returned undefined (falsy),
        // so an inline `onsubmit="return valider()"` would never submit a
        // valid form. Explicitly return true on success.
        return true;
    }
}
| 770f9edb618ce744d085f4fad5b4204218bc6996 | [
"JavaScript"
] | 1 | JavaScript | Aymeric-kun/TP1-JANNOT-Aymeric | 7120954c0c78819041d8344a02bda1d50b6f2d4c | 80d94a82baa4a69dd3d5d5cd6be000570195d1b1 |
refs/heads/main | <file_sep>import { Component, OnInit } from "@angular/core";
import { AlertController } from "@ionic/angular";
import { CartService } from "src/app/core-modules/services/cart/cart-state/cart.service";
import { ProductFacadeService } from "src/app/core-modules/services/products/product-facade/product-facade.service";
import { HeaderStateService } from "src/app/core-modules/services/header/header-state/header-state.service";
@Component({
  selector: "app-home",
  templateUrl: "./home.page.html",
  styleUrls: ["./home.page.scss"],
})
/**
 * Landing/home page: loads the catalogue and the current user's liked
 * products, exposing `liked` / `show_liked` to the template.
 */
export class HomePage implements OnInit {
  public show_top_picks;
  public products$;
  public liked;
  // True when the user has at least one liked product (template toggle).
  public show_liked;
  // Fix: keep the subscription handle so it can be released on destroy —
  // the original subscribed in ngOnInit and never unsubscribed (leak).
  private likedSub;

  constructor(
    private cartService: CartService,
    public alertController: AlertController,
    private _productFacade: ProductFacadeService,
    private headerState: HeaderStateService
  ) {}

  ngOnInit() {
    // Load all products and the user's liked products via the facade.
    this._productFacade.loadProducts();
    this._productFacade.loadUserlikedProducts();
    this.products$ = this._productFacade.getUserlikedProducts();
    this.likedSub = this.products$.subscribe((res) => {
      // Derive the visibility flag directly instead of an if/else chain.
      this.show_liked = res.length > 0;
      this.liked = res;
    });
  }

  ngOnDestroy() {
    // Release the liked-products subscription when the page is torn down.
    if (this.likedSub) {
      this.likedSub.unsubscribe();
    }
  }

  /** Add a product to the shared cart state. */
  addToCart(product) {
    this.cartService.addProduct(product);
  }
}
<file_sep>import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { IonicModule } from '@ionic/angular';
import { UserAdditionPage } from './user-addition.page';
// Scaffold-generated smoke test: verifies UserAdditionPage can be compiled
// and instantiated inside an Ionic testing module.
describe('UserAdditionPage', () => {
let component: UserAdditionPage;
let fixture: ComponentFixture<UserAdditionPage>;
beforeEach(async(() => {
// Build a minimal testing module containing only the page under test.
TestBed.configureTestingModule({
declarations: [ UserAdditionPage ],
imports: [IonicModule.forRoot()]
}).compileComponents();
fixture = TestBed.createComponent(UserAdditionPage);
component = fixture.componentInstance;
fixture.detectChanges();
}));
it('should create', () => {
expect(component).toBeTruthy();
});
});
<file_sep>const express = require("express");
const jwt = require("jsonwebtoken");
const keys = require("../../../config/users/keys");
const async = require("async");
const router = express.Router();
const authCheck = require("../../../validation/authenticate/checkMiddleware/jwtCheck");
// Load Product model
const Store = require("../../../models/store/Store");
const Voucher = require("../../../models/store/Voucher");
const Product = require("../../../models/category/Product");
// Look up a single voucher by its document id (supplied in the body).
router.post("/retrieve_voucher", (req, res) => {
  const voucherId = req.body.id;
  console.log(voucherId);
  Voucher.findOne({ _id: voucherId })
    .then((voucher) => res.json({ voucher: voucher }))
    .catch((err) => res.json({ err: err }));
});
// Look up a single voucher by its human-facing voucher number.
router.post("/retrieve_voucher_by_number", (req, res) => {
  const query = { voucher_number: req.body.voucher_number };
  Voucher.findOne(query)
    .then((voucher) => {
      console.log(voucher);
      res.json({ voucher: voucher });
    })
    .catch((err) => res.json({ err: err }));
});
// List every voucher belonging to the authenticated user's store
// (store id comes from the decoded JWT placed on req.authData).
router.get("/retrieve_store_voucher", authCheck, (req, res) => {
  const { store_id } = req.authData;
  Voucher.find({ store_id: store_id })
    .then((vouchers) => res.json({ vouchers: vouchers }))
    .catch((err) => res.json({ err: err }));
});
// List every voucher in the system (no filter, no auth).
router.get("/retrieve_all_vouchers", (req, res) => {
  Voucher.find()
    .then((vouchers) => res.json({ vouchers: vouchers }))
    .catch((err) => res.json({ err: err }));
});
// List every voucher for an explicitly supplied store id (public lookup).
router.post("/retrieve_all_vouchersById", (req, res) => {
  const storeId = req.body.id;
  Voucher.find({ store_id: storeId })
    .then((vouchers) => res.json({ vouchers: vouchers }))
    .catch((err) => res.json({ err: err }));
});
// Create a voucher (sale or promo) for a store.
// Body: { promo: { ...voucher fields, products: [{ p_id, s_id?, ... }] },
//         store_id }.
// Each product entry is enriched with its primary Product document (and the
// optional secondary one) before the voucher is persisted; the referenced
// products are then flagged as on sale / on promo.
router.post("/create_voucher", authCheck, (req, res) => {
  let data = req.body.promo;
  let store_id = req.body.store_id;

  // Generate a random alphanumeric voucher number.
  function randomString(length, chars) {
    var result = "";
    for (var i = length; i > 0; --i)
      result += chars[Math.floor(Math.random() * chars.length)];
    return result;
  }
  let length = 10;
  let chars = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";

  let products = data.products; // array of product entries in the voucher
  let voucher_number = randomString(length, chars);

  Store.findOne({ _id: store_id })
    .then((store) => {
      // Enrich every entry with its product document(s).
      let m_array = products.map((product) => {
        return Product.findOne({ _id: product.p_id }).then((prod) => {
          if (product.s_id) {
            // Bug fix: the original declared `m_p` inside this branch but
            // returned it *outside* of it, throwing a ReferenceError and
            // silently dropping every entry that had a secondary product.
            // Returning the inner promise directly chains it correctly.
            return Product.findOne({ _id: product.s_id }).then((p) => {
              return { ...product, prod: prod, s_pro: p };
            });
          }
          return { ...product, prod: prod, s_pro: null };
        });
      });

      return Promise.all(m_array).then(function (results) {
        const voucher = new Voucher({
          voucher_number: voucher_number,
          store_id: store._id,
          run_sale: data.run_sale,
          type: data.type,
          title: data.title,
          products: results, // enriched product objects
          total_quota: data.total_quota,
          platform: data.platform,
          exp_date: data.exp_date,
        });
        return voucher.save().then((voucher) => {
          if (voucher.run_sale) {
            // Flag each referenced product as part of this sale.
            async.each(voucher.products, function (product, callback) {
              if (product) {
                Product.findOne({ _id: product.p_id }).then((product) => {
                  product.sale = true;
                  product.sale_type = voucher._id;
                  product.save();
                });
              }
            });
          } else {
            // Flag each referenced product as part of this promo.
            async.each(data.products, function (product, callback) {
              if (product) {
                Product.findOne({ _id: product.p_id }).then((product) => {
                  product.promo = true;
                  product.promo_type = voucher._id;
                  product.save();
                });
              }
            });
          }
          res.json({ voucher: voucher });
        });
      });
    })
    .catch((err) => {
      // Fix: the original chain had no rejection handler, so any database
      // error left the request hanging with no response.
      res.status(500).json({ err: err });
    });
});
// Expose the voucher routes for mounting by the API server.
module.exports = router;
<file_sep>import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { FormsModule } from '@angular/forms';
import { IonicModule } from '@ionic/angular';
import { ReturnPolicyPageRoutingModule } from './return-policy-routing.module';
import { ReturnPolicyPage } from './return-policy.page';
// Feature module for the Return Policy page: wires up forms, Ionic widgets
// and the page's own routing module, and declares the page component.
@NgModule({
imports: [
CommonModule,
FormsModule,
IonicModule,
ReturnPolicyPageRoutingModule
],
declarations: [ReturnPolicyPage]
})
export class ReturnPolicyPageModule {}
<file_sep>import { Component, OnInit } from "@angular/core";
import {
FormGroup,
FormBuilder,
Validators,
} from "@angular/forms";
import { Router } from "@angular/router";
import { AuthApiService } from "src/app/core-modules/services/auth/auth-api/auth-api.service";
import { PasswordValidator } from "src/app/modules/auth/shared/password.validator";
import { ToastController } from "@ionic/angular";
import { ProductApiService } from "src/app/core-modules/services/products/product-api/product-api.service";
import { HeaderStateService } from 'src/app/core-modules/services/header/header-state/header-state.service'
import { UserFacadeService } from 'src/app/core-modules/services/profile/profile-facade/profile-facade.service';
import { MenuStateService } from 'src/app/core-modules/services/menus/menu-state/menu-state.service';
@Component({
selector: "app-register",
templateUrl: "./register.page.html",
styleUrls: ["./register.page.scss"],
})
/**
 * User registration page.
 * Collects email + password (with confirmation via PasswordValidator),
 * requires a profile picture upload before submission, registers through
 * AuthApiService, then stores the JWT and routes to the /about page.
 */
export class RegisterPage implements OnInit {
// Template flag: false while a registration request is in flight.
public Submit_load;
// Guards against double submission.
public submitted: boolean;
// NOTE(review): set to true only *after* the upload completes — looks like
// "picture ready" rather than "spinner visible"; confirm against template.
public picture_spinner = false;
//initialize new empty form-group of type FormGroup
private registrationForm: FormGroup;
// Whether the user also wants to register a store (toggle in the template).
public register_store: boolean = false;
// Server-side path/id of the uploaded profile image.
public picture_uploaded: any;
constructor(
public toastController: ToastController,
private formBuilder: FormBuilder,
private _authService: AuthApiService,
private router: Router,
private productApi: ProductApiService,
private headerState: HeaderStateService,
public menuState: MenuStateService,
private userFacade: UserFacadeService
) { }
ngOnInit() {
this.Submit_load = true;
//create new form-group instance
this.registrationForm = this.formBuilder.group(
{
//create instances of form-control
email: [
"",
Validators.compose([
Validators.maxLength(50),
Validators.pattern(
"^[_A-Za-z0-9-\\+]+(\\.[_A-Za-z0-9-]+)*@[A-Za-z0-9-]+(\\.[A-Za-z0-9]+)*(\\.[A-Za-z]{2,})$"
),
Validators.required,
]),
],
password: ["", [Validators.required, Validators.minLength(6)]],
confirmPassword: ["", Validators.required],
},
// Cross-field validator ensuring password === confirmPassword.
{ validator: PasswordValidator }
);
}
/**
 * Submit the registration form. Requires a picture to have been uploaded;
 * on success stores the JWT, refreshes user/menu state and navigates on.
 */
onSubmit() {
if (this.picture_uploaded) { //this should check if the is a picture uploaded before proceeding
if (this.submitted) {
this.presentToast("already submitted");
} else {
this.Submit_load = false;
this.submitted = true;
this.presentToast("submitting...");
// Merge the uploaded image reference into the form payload.
let registered = {
...this.registrationForm.value,
profileImage: this.picture_uploaded,
};
//register to service and subscribe to observable
this._authService.userRegister(registered).subscribe(
(response) => {
localStorage.setItem("token", response.token);
this.headerState.policy_header = true
//set user state
this.userFacade.loadUser();
//set menu state
this.menuState.updateMenuStatus("user");
// Pass the response and store-registration flag to the /about page.
this.router.navigate(["/about"], {
state: { data: response, rs: this.register_store },
});
this.Submit_load = true;
this.presentToast("Profile Registered successfully ");
},
(error) => {
// Allow retrying after a failed registration.
this.submitted = false;
this.Submit_load = true;
this.presentToast("fill in the form or Try a different email address ");
console.error("Error", error);
}
);
}
} else {
this.presentToast("Upload a picture first !!")
}
}
sendToParent() { }
/** Ionic toggle handler: track whether the user also registers a store. */
sendToggleVlaue(data) {
this.register_store = data.detail.checked;
}
/** Show a short bottom toast with the given message. */
async presentToast(data) {
const toast = await this.toastController.create({
message: data,
duration: 3000,
position: "bottom",
});
toast.present();
}
/**
 * Handle the file input: upload a JPEG/PNG profile picture and remember
 * the server-side reference for onSubmit. Other file types are ignored.
 */
handleFileInput(event) {
console.log(event)
let file = event.target.files[0];
if (file.type == "image/jpeg" || file.type == "image/png") {
const _formData = new FormData();
_formData.append("image", file);
//save picture
this.productApi.uploadImage(_formData).subscribe((res) => {
this.picture_spinner = true
this.picture_uploaded = res.data;
});
}
}
/** Variant toast used elsewhere; animated, 3s duration. */
async presentSaleToast(data) {
const toast = await this.toastController.create({
message: data,
position: "bottom",
animated: true,
duration: 3000,
});
toast.present();
}
}
<file_sep>const express = require("express");
const passport = require("passport");
const jwt = require("jwt-simple");
const keys = require("../../../config/users/keys");
const frontHost = require("../../../validation/config/front/server");
const router = express.Router();
// Kick off Google OAuth: redirect the user to Google's consent screen,
// requesting basic profile and email access.
const googleScope = [
  "https://www.googleapis.com/auth/userinfo.profile",
  "https://www.googleapis.com/auth/userinfo.email",
];
router.get("/google", passport.authenticate("google", { scope: googleScope }));
// Google OAuth callback: on success, encode the user's googleId as a token
// and hand it to the front end through a redirect query parameter.
router.get(
  "/google/redirect",
  passport.authenticate("google", { failureRedirect: "/api/users/login" }),
  (req, res) => {
    const encryptedGoogleId = jwt.encode({ id: req.user.googleId }, keys.secretOrKey);
    return res.redirect(`${frontHost.server}user/google/?id=${encryptedGoogleId}`);
  }
);
// Terminate the passport session and return the user to the front-end
// login page.
router.get("/logout", (req, res) => {
  req.logout();
  res.redirect(`${frontHost.server}user/login/`);
});

// Expose the Google auth routes for mounting by the API server.
module.exports = router;
<file_sep>import { Component, OnDestroy, OnInit } from "@angular/core";
import { Platform } from "@ionic/angular";
import { SplashScreen } from "@ionic-native/splash-screen/ngx";
import { StatusBar } from "@ionic-native/status-bar/ngx";
import { NavigationStart, Router } from "@angular/router";
import { AlertController } from "@ionic/angular";
import { Observable, Observer, fromEvent, merge, Subscription } from "rxjs";
import { map } from "rxjs/operators";
import { distinctUntilChanged } from "rxjs/operators";
import {BreakpointObserver, Breakpoints} from '@angular/cdk/layout';
import { AuthApiService } from "./core-modules/services/shared/authentication/auth-api/auth-api.service";
import { MenuStateService } from "./core-modules/services/utils/menu-state/menu-state.service";
import { UserFacadeService } from "./core-modules/services/shared/profile/profile-facade/profile-facade.service";
// Module-level flag: true when the current navigation is a browser refresh
// (set from the NavigationStart handler below; imported elsewhere).
export let browserRefresh = false;
@Component({
selector: "app-root",
templateUrl: "app.component.html",
styleUrls: ["app.component.scss"],
})
/**
 * Root shell component: initializes the native platform, tracks online
 * status, screen breakpoint and menu/login state, and loads the user
 * profile when a JWT is present.
 */
export class AppComponent implements OnInit, OnDestroy {
user: any;
ownStore: boolean;
location: boolean;
isVerified: boolean;
// Latest connectivity status from createOnline$().
isOnline: boolean;
public loggedIn;
public token;
// "Handset" | "Tablet" | "Web", from the CDK breakpoint observer.
device_screen ;
//observables
profile$;
isUpdating$;
menuStatus$: any;
public show;
// Router-events subscription, released in ngOnDestroy.
subscription: Subscription;
//Check if internet connection
// Emits the current navigator.onLine value, then true/false on
// online/offline window events.
createOnline$() {
return merge<boolean>(
fromEvent(window, "offline").pipe(map(() => false)),
fromEvent(window, "online").pipe(map(() => true)),
new Observable((sub: Observer<boolean>) => {
sub.next(navigator.onLine);
sub.complete();
})
);
}
constructor(
private platform: Platform,
private splashScreen: SplashScreen,
private statusBar: StatusBar,
private router: Router,
private authService: AuthApiService,
public alertController: AlertController,
public menuState: MenuStateService,
private userFacade: UserFacadeService,
breakpointObserver: BreakpointObserver,
) {
this.initializeApp();
////loading
this.isUpdating$ = this.userFacade.isUpdating$();
// Detect browser refreshes: router.navigated is false before the first
// completed navigation.
this.subscription = router.events.subscribe((event) => {
if (event instanceof NavigationStart) {
browserRefresh = !router.navigated;
}
});
// Classify the viewport into one of three device categories.
breakpointObserver.observe([
Breakpoints.Handset
]).subscribe(result => {
if (result.matches) {
this.device_screen = "Handset"
}
});
breakpointObserver.observe([
Breakpoints.Tablet
]).subscribe(result => {
if (result.matches) {
this.device_screen = "Tablet"
}
});
breakpointObserver.observe([
Breakpoints.Web
]).subscribe(result => {
if (result.matches) {
this.device_screen = "Web"
}
});
}
/** Cordova bootstrap: status bar style + hide the splash screen. */
initializeApp() {
this.platform.ready().then(() => {
this.statusBar.styleDefault();
this.splashScreen.hide();
});
}
ngOnInit() {
//check if internet is on
this.createOnline$().subscribe((isOnline) => {
if (isOnline) {
this.isOnline = isOnline;
//update menu appropriately
this.menuStatus$ = this.menuState
.getMenuStatus()
.pipe(distinctUntilChanged());
// Is user logged in
this.loggedIn = this.menuState.loggedIn().pipe(distinctUntilChanged());
if (!!localStorage.getItem("token")) {
//get User profile
this.userFacade.loadUser();
this.profile$ = this.userFacade
.getUser$()
.pipe(distinctUntilChanged());
} else {
//if User not logged in
this.menuState.updateMenuStatus("none");
}
this.token = !!localStorage.getItem("token");
console.log(this.profile$);
} else {
//if user offline
this.router.navigate(["/auth/user/login"]);
}
});
}
/** Re-request an account verification email/flow from the API. */
verifyAgain() {
this.authService.verifyAgain().subscribe(
(res) => {
console.log(res.message);
//alertController
},
(err) => {
console.log(err);
}
);
}
/** Clear the JWT and all cached user state, then go to login. */
logout() {
localStorage.removeItem("token");
this.userFacade.resetUserObs();
this.menuState.loggedIn();
this.menuState.updateMenuStatus("none");
this.router.navigate(["/user/login"]);
}
show_Confirm() {
this.show = true;
}
ngOnDestroy() {
this.subscription.unsubscribe();
}
}
<file_sep>import { Injectable } from "@angular/core";
import { ProfileApiService } from "src/app/core-modules/services/shared/profile/profile-api/profile-api.service";
import { ProfileStateService } from "src/app/core-modules/services/shared/profile/profile-state/profile-state.service";
import { distinctUntilChanged } from "rxjs/operators";
import { ToastController } from "@ionic/angular";
import { Router } from "@angular/router";
@Injectable({
  providedIn: "root",
})
/**
 * Facade over the profile API and profile state services.
 * Components interact with user/store profile data only through this class;
 * it orchestrates API calls, state updates, toasts and navigation.
 */
export class UserFacadeService {
  constructor(
    private userApi: ProfileApiService,
    private userState: ProfileStateService,
    private router: Router,
    public toastController: ToastController
  ) {}

  /** Stream that is true while a profile load/update is in flight. */
  isUpdating$() {
    return this.userState.isUpdating$();
  }

  // ---- Selected / current store pass-throughs ----

  getSelectedStore() {
    return this.userState.getSelectedStore();
  }

  setSelectedStore(data) {
    return this.userState.setSelectedStore(data);
  }

  resetSelectedStore() {
    return this.userState.resetSelectedStore();
  }

  /** Clear all cached user state (used on logout). */
  resetUserObs() {
    return this.userState.resetUserObs();
  }

  getCurrentStore() {
    return this.userState.getCurrentStore();
  }

  setCurrentStore(data) {
    return this.userState.setCurrentStore(data);
  }

  /** Persist a like/unlike on a store and refresh the cached store profile. */
  updateStoreLike(data) {
    this.userApi.updateStoreLike(data).subscribe(
      (res) => {
        let store = res.store;
        this.userState.setStoreProfile(store);
      },
      (err) => {
        // Fix: the error was silently swallowed; at least surface it.
        console.error(err);
      }
    );
  }

  getUser$() {
    // here we just pass the state without any projections
    // it may happen that it is necessary to combine two or more streams and expose to the components
    return this.userState.getUser$();
  }

  getStoreProfile$() {
    return this.userState.getStoreProfile$();
  }

  /** Fetch a store by id and cache it as both profile and current store. */
  loadStoreProfile(id) {
    // NOTE(review): setUpdating(true) has no matching setUpdating(false)
    // here — confirm whether the spinner is cleared elsewhere.
    this.userState.setUpdating(true);
    this.userApi.storeProfile({ _id: id }).subscribe(
      (res_store) => {
        let store = res_store.store;
        this.userState.setStoreProfile(store);
        this.setCurrentStore(store);
      },
      (err) => {
        console.log(err);
      }
    );
  }

  /** Convenience: trigger a load and hand back the profile stream. */
  returnStoreProfile(id) {
    this.loadStoreProfile(id);
    return this.getStoreProfile$();
  }

  updateStoreLogo(data) {
    return this.userApi.updateStoreLogo(data);
  }

  updateProfileImage(data) {
    return this.userApi.updateProfileImage(data);
  }

  /** Save store changes, refresh state, toast and return to the store page. */
  updateStoreProfile(data) {
    this.userState.setUpdating(true);
    console.log(data);
    this.userApi.updateStoreProfile(data).subscribe(
      (res_store) => {
        let store = res_store.store;
        this.userState.setStoreProfile(store);
        this.setCurrentStore(store);
        this.presentToast("store updated");
        this.router.navigate(["/admin-store/store"]);
      },
      (err) => {
        console.log(err);
        this.presentToast("oops! something went wrong");
      }
    );
  }

  /** Fetch the logged-in user's profile into state. */
  loadUser() {
    this.userState.setUpdating(true);
    this.userApi.getUser().subscribe(
      (res) => {
        this.userState.setUser(res.user);
      },
      (err) => {
        console.log(err);
      },
      () => this.userState.setUpdating(false)
    );
  }

  // update user optimistic way: state first, then persist.
  updateUser(data) {
    this.userState.setUpdating(true);
    //update user state
    this.userState.updateUser(data);
    //update database user item status
    this.userApi.updateUser(data).subscribe(
      (res) => {
        console.log("item updated");
      },
      (err) => {
        //we need to rollback
        console.log(err);
      },
      () => this.userState.setUpdating(false)
    );
  }

  /** Remove a user from local state. */
  removeUser(user) {
    this.userState.removeUser(user);
    // TODO(review): no API call is made here — the backend record is never
    // deleted (the original built an unused id payload and stopped).
  }

  /** Modify a store admin and refresh the cached current store. */
  updateAdmin(data) {
    this.userApi.storeUpdateAdmin(data).subscribe(
      (res) => {
        this.setCurrentStore(res.store);
        this.presentToast("Admin user modified!");
      },
      (err) => {
        console.log(err);
      }
    );
  }

  /** Remove a store admin on the backend. */
  removeAdmin(admin) {
    this.userApi.storeRemoveAdmin(admin).subscribe(
      (res) => {
        console.log(res);
      },
      (err) => {
        console.log(err);
      }
    );
  }

  /** Show a short toast with the given message. */
  async presentToast(msg) {
    const toast = await this.toastController.create({
      // NOTE(review): "color" is not a valid Ionic color name — kept as-is
      // to preserve behavior; confirm the intended theme color.
      color: "color",
      message: msg,
      duration: 2500,
    });
    toast.present();
  }
}
<file_sep>import { Injectable, Injector } from '@angular/core';
import { HttpInterceptor, HttpRequest, HttpHandler, HttpEvent } from '@angular/common/http';
import { Observable } from 'rxjs';
import { tap } from 'rxjs/operators';
import { Router } from '@angular/router';
import { AuthApiService } from '../../services/shared/authentication/auth-api/auth-api.service';
@Injectable({
providedIn: 'root'
})
export class TokenInterceptorService implements HttpInterceptor {
constructor(
private injector: Injector,
private router: Router
) { }
intercept(req: HttpRequest<any>, next: HttpHandler): Observable<HttpEvent<any>>{
let authService = this.injector.get(AuthApiService)
let tokenizedReq = req.clone({
setHeaders:{
Authorization: `Bearer ${authService.getToken()}`
}
})
return next.handle(tokenizedReq);
}
}
<file_sep>import { Injectable } from "@angular/core";
import { HttpClient } from "@angular/common/http";
import { EnvService } from "src/app/env.service";
@Injectable({
providedIn: "root",
})
/**
 * Thin HTTP client for store-order endpoints (fetch orders by store id,
 * update an order's status, record a return action).
 */
export class OrdersApiService {
// Endpoint URLs built from the environment's API base URL.
_urlStoreOrdersById = `${this.env.apiUrl}/api/payfast/get_store_ordersById`;
_urlUpdateOrder = `${this.env.apiUrl}/api/payfast/update_order_status`;
_urlReturnActionUpdate = `${this.env.apiUrl}/api/payfast/update_return_action`;
constructor(private _http: HttpClient, private env: EnvService) {}
/** POST the store id; resolves with that store's orders. */
getStoreOrdersById(id) {
return this._http.post<any>(this._urlStoreOrdersById, id);
}
/** POST an order-status change payload. */
updateOrderStatus(data: any) {
return this._http.post<any>(this._urlUpdateOrder, data);
}
/** POST a return-action update payload. */
returnActionUpdate(data) {
return this._http.post<any>(this._urlReturnActionUpdate, data);
}
}
<file_sep>import { Component, OnInit } from "@angular/core";
import { ModalController } from "@ionic/angular";
import { CartService } from "src/app/core-modules/services/cart/cart-state/cart.service";
import { CartModalComponent } from "../modals/cart-modal/cart-modal.component";
import { Location } from "@angular/common";
import { HeaderStateService } from "src/app/core-modules/services/header/header-state/header-state.service";
import { ProductStateService } from 'src/app/core-modules/services/products/product-state/product-state.service';
import { ProductFacadeService } from 'src/app/core-modules/services/products/product-facade/product-facade.service';
import { Router } from '@angular/router';
@Component({
selector: "app-user-header",
templateUrl: "./header.component.html",
styleUrls: ["./header.component.scss"],
})
/**
 * Top navigation header: cart badge, back navigation, search entry point,
 * and a transparent style variant on the product-detail page.
 */
export class HeaderComponent implements OnInit {
// Observable count of items in the cart (badge).
cartItemCount;
// CSS class applied when the header should be transparent.
transparent_toolbar;
show_back;
headerStatus$;
// Whether a JWT is present in localStorage.
token ;
profile$;
avatar;
// NOTE(review): toggled by cancelSearch() but the line that would set it
// true in showSearch() is commented out — confirm intended behavior.
showSearchB = false;
constructor(
private _location: Location,
private cartService: CartService,
private modalCtrl: ModalController,
private headerState: HeaderStateService,
private productState: ProductStateService,
private _productFacade: ProductFacadeService,
private router: Router,
) {}
ngOnInit() {
this.cartItemCount = this.cartService.getCartItemCount();
this.headerStatus$ = this.headerState.getHeaderStatus();
// Switch to the transparent toolbar style on the product-detail page.
this.headerState.getHeaderStatus().subscribe(
(res) => {
if (res == "product_detail") {
this.transparent_toolbar = "clear_navbar";
}
},
(err) => {
console.log(err);
}
);
this.token = !!localStorage.getItem("token");
}
/** Open the shopping-cart modal. */
async openCart() {
const modal = await this.modalCtrl.create({
component: CartModalComponent,
//enableBackdropDismiss: false,
cssClass: "custom-modal-css",
});
return await modal.present();
}
/** Navigate back and clear the header status. */
goBack() {
this._location.back();
this.headerState.resetHeaderStatus();
}
/** Entry point for the search icon: loads the full catalogue. */
showSearch(){
//this.showSearchB = true
this.loadAllProducts()
}
cancelSearch(){
this.showSearchB = false
}
/**
 * Load every product, seed both the main and searchable product state,
 * and navigate to the all-products listing.
 */
loadAllProducts() {
localStorage.setItem("all_p", "all departments");
this._productFacade.loadProducts();
this._productFacade.getProducts$().subscribe(
(res) => {
this.productState.setMainProducts(res);
this.productState.setSearchingProducts(res);
this.router.navigate(["/landing/all-products"]);
},
(err) => {}
);
}
}
<file_sep>import { Component, OnInit } from '@angular/core';
import { AuthApiService } from 'src/app/core-modules/services/auth/auth-api/auth-api.service';
import { HttpClient, HttpErrorResponse } from '@angular/common/http';
import { Router, ActivatedRoute } from '@angular/router';
import { FormGroup, FormControl, FormBuilder, Validators } from '@angular/forms';
import { PasswordValidator } from '../../shared/password.validator';
@Component({
selector: 'app-reset-password',
templateUrl: './reset-password.page.html',
styleUrls: ['./reset-password.page.scss'],
})
/**
 * Password-reset page: reads the reset id/token from the URL query string
 * (sent by the backend email link), collects a new password with
 * confirmation, and submits the reset to AuthApiService.
 */
export class ResetPasswordPage implements OnInit {
// Reset identifiers extracted from the query string.
initId: string;
initToken: string;
private resetPasswordForm : FormGroup
constructor(
private _authService: AuthApiService,
private router: Router,
private activatedRoute: ActivatedRoute,
private formBuilder: FormBuilder
) { }
ngOnInit() {
//get params from backend query string
this.activatedRoute.queryParams.subscribe(
params => {
this.initId = params['id'];
this.initToken = params['token']
}
)
//create new form-group instance
this.resetPasswordForm = this.formBuilder.group({
//create instances of form-control
id: [this.initId, [Validators.required]],
token: [this.initToken, [Validators.required]],
password: ['', [Validators.required, Validators.minLength(6) ]],
confirmPassword: ['', Validators.required],
// Cross-field validator: password must equal confirmPassword.
}, {validator: PasswordValidator});
}
/** Submit the reset; on success return to login with the server message. */
onSubmit(){
//register to service and subscribe to observable
this._authService.resetPassword(this.resetPasswordForm.value).subscribe(
response => {
console.log(response)
this.router.navigate(['/user/login'], {state: {data: response.message}});
},
error => console.error('Error', error)
)
}
}
<file_sep>import { Component, OnInit } from '@angular/core';
import { UserFacadeService } from "src/app/core-modules/services/profile/profile-facade/profile-facade.service";
import { MenuStateService } from "src/app/core-modules/services/menus/menu-state/menu-state.service";
import { Router } from "@angular/router";
@Component({
selector: 'app-popover',
templateUrl: './popover.component.html',
styleUrls: ['./popover.component.scss'],
})
/**
 * Small popover menu exposing a logout action.
 * Logout mirrors AppComponent.logout(): clears the JWT and cached state,
 * then navigates to the login page.
 */
export class PopoverComponent implements OnInit {
constructor( public menuState: MenuStateService, private router: Router,
private userFacade: UserFacadeService,) { }
ngOnInit() {}
logout() {
localStorage.removeItem("token");
this.userFacade.resetUserObs();
this.menuState.loggedIn();
this.menuState.updateMenuStatus("none");
this.router.navigate(["/user/login"]);
}
}
<file_sep>import { Component, OnInit, Input } from "@angular/core";
import { MenuStateService } from "src/app/core-modules/services/menus//menu-state/menu-state.service";
import { UserFacadeService } from "src/app/core-modules/services/profile/profile-facade/profile-facade.service";
import { distinctUntilChanged } from "rxjs/operators";
@Component({
selector: "app-user-menu",
templateUrl: "./user.component.html",
styleUrls: ["./user.component.scss"],
})
/**
 * Side-menu for a logged-in user: shows the profile (name, avatar or a
 * placeholder) and lets a store owner switch into the admin menu.
 */
export class UserComponent implements OnInit {
// NOTE(review): never assigned in this class — presumably bound in the
// template or leftover; confirm before removing.
public order;
// True when the profile has no image, so the template shows a placeholder.
public avatar;
isUpdating$;
profile$;
// Whether a JWT is present in localStorage.
token;
// Provided by the parent: whether this user owns a store.
@Input() storeOwner: boolean;
constructor(
public menuState: MenuStateService,
private userFacade: UserFacadeService
) {
this.isUpdating$ = this.userFacade.isUpdating$();
}
ngOnInit() {
//get user orders
//get User profile
this.token = !!localStorage.getItem("token");
this.userFacade
.getUser$()
.pipe(distinctUntilChanged())
.subscribe(
(res) => {
this.profile$ = res;
if (this.profile$.profileImage) {
this.avatar = false;
} else {
this.avatar = true;
}
console.log(this.profile$);
},
(err) => {
console.log(err);
}
);
}
/** Switch the shared menu state (e.g. into the store-admin menu). */
adminShow(admin) {
this.menuState.updateMenuStatus(admin);
}
}
<file_sep>import { TestBed } from "@angular/core/testing";
import { VoucherFacadeService } from "./voucher-facade.service";
// Scaffold-generated smoke test: verifies VoucherFacadeService can be
// instantiated by the Angular injector.
describe("VoucherFacadeService", () => {
let service: VoucherFacadeService;
beforeEach(() => {
TestBed.configureTestingModule({});
service = TestBed.inject(VoucherFacadeService);
});
it("should be created", () => {
expect(service).toBeTruthy();
});
});
<file_sep>import { Injectable } from "@angular/core";
import { BehaviorSubject, Observable } from "rxjs";
@Injectable({
providedIn: "root",
})
/**
 * In-memory product state, implemented as a set of BehaviorSubjects:
 * the detail-page product (plus a back-navigation stack), the various
 * product listings (main / searching / store / all), and per-user data
 * (current product, reviews, liked products).
 */
export class ProductStateService {
// Detail page: the product currently shown (wrapped in a 1-element array).
private detail_product$ = new BehaviorSubject<any[]>([]);
// The product as loaded from the backend (may differ from display copy).
private database_product$ = new BehaviorSubject<any[]>([]);
// Products similar to the one on the detail page.
private similar_product$ = new BehaviorSubject<any>([]);
// Stack of previously viewed detail products (back navigation).
private detail_stack$ = new BehaviorSubject<any[]>([]);
private loading$ = new BehaviorSubject<boolean>(false);
// Product listings for the main view / search / store pages.
private main_products$ = new BehaviorSubject<[]>([]);
private searching_products$ = new BehaviorSubject<[]>([]);
private store_products$ = new BehaviorSubject<any[]>([]);
private store_optimum_products$ = new BehaviorSubject<any[]>([]);
private products$ = new BehaviorSubject<any[]>([]);
//profile
private current_product$ = new BehaviorSubject<any>([]);
private user_reviews$ = new BehaviorSubject<any[]>([]);
private user_liked_products$ = new BehaviorSubject<any[]>([]);
constructor() {}
updateLoading(value) {
this.loading$.next(value);
}
subscribeLoading() {
return this.loading$.asObservable();
}
/********************DETAIL PRODUCT PAGE *********************/
// Note: the product is stored wrapped in a single-element array.
setDetailProduct(product) {
this.detail_product$.next([product]);
}
getDetailProduct() {
return this.detail_product$.asObservable();
}
setSimilarProducts(products) {
this.similar_product$.next(products);
}
getSimilarProducts() {
return this.similar_product$.asObservable();
}
setDataBaseProduct(product) {
this.database_product$.next(product);
}
getDataBaseProduct() {
return this.database_product$.asObservable();
}
/** Push the given product onto the top of the back-navigation stack. */
setStackProducts(product) {
let stack = this.detail_stack$.getValue();
this.detail_stack$.next([product, ...stack]);
}
/**
 * Pop the current entry off the stack and return the previous one
 * (drops two entries total: the current page and the one being returned).
 */
reloadPreviousItem() {
let stack = this.detail_stack$.getValue();
//set detail page
stack.shift();
console.log(stack);
let detail_product = stack[0];
stack.shift();
//set new stack
console.log(stack);
this.detail_stack$.next(stack);
console.log(detail_product);
return detail_product;
}
/** Clear the whole back-navigation stack. */
removePreviousItem() {
return this.detail_stack$.next([]);
}
// Returns the raw stack value (synchronous snapshot, not an observable).
getStackProducts() {
return this.detail_stack$.getValue();
}
/******************** END DETAIL PRODUCT PAGE *********************/
// NOTE(review): writes the same subject as setMainProducts — the "view"
// and "main" listings are presently aliases; confirm this is intended.
setViewProducts(products) {
this.main_products$.next(products);
}
getViewProducts() {
return this.main_products$.asObservable();
}
setSearchingProducts(products) {
this.searching_products$.next(products);
}
getSearchingProducts() {
return this.searching_products$.asObservable();
}
setMainProducts(products) {
this.main_products$.next(products);
}
getMainProducts() {
return this.main_products$.asObservable();
}
setStoreProducts(products) {
this.store_products$.next(products);
}
setOptimumStoreProducts(products) {
this.store_optimum_products$.next(products);
}
getStoreProducts() {
return this.store_products$.asObservable();
}
getOptimumStoreProducts() {
return this.store_optimum_products$.asObservable();
}
setProducts(products) {
this.products$.next(products);
}
getProducts$() {
return this.products$.asObservable();
}
/** Seed both main and searching listings when segmenting by category. */
setSegmentsProducts(products, signal) {
if (signal == "category") {
this.main_products$.next(products);
this.searching_products$.next(products);
console.log(products, this.main_products$, this.searching_products$);
}
}
//PROFILE
setCurrentProduct(product) {
this.current_product$.next(product);
}
// NOTE(review): returns the subject itself, unlike the other getters which
// return .asObservable() — callers could push values; confirm intent.
getCurrentProduct() {
return this.current_product$;
}
setUserReviews(products) {
this.user_reviews$.next(products);
}
getUserReviews() {
return this.user_reviews$.asObservable();
}
setUserLikedProducts(products) {
return this.user_liked_products$.next(products);
}
getUserLikedProducts() {
return this.user_liked_products$.asObservable();
}
}
<file_sep>import { Injectable } from "@angular/core";
import { BehaviorSubject } from "rxjs";
@Injectable({
  providedIn: "root",
})
export class ProductsStateService {
  /** Products belonging to the currently selected store. */
  private current_store_product$ = new BehaviorSubject<any[]>([]);

  constructor() {}

  setCurrentStoreProducts(product) {
    this.current_store_product$.next(product);
  }

  getCurrentStoreProducts() {
    return this.current_store_product$.asObservable();
  }

  /**
   * Replace the product whose _id matches updatedProduct._id.
   * Fixed: findIndex can return -1; previously the update was written to
   * index -1 (a stray "-1" property). Now a no-op when not found.
   */
  updateProduct(updatedProduct) {
    const products = this.current_store_product$.getValue();
    const index = products.findIndex(
      (product) => product._id === updatedProduct._id
    );
    if (index === -1) {
      return; // unknown product — nothing to update
    }
    products[index] = updatedProduct;
    this.current_store_product$.next([...products]);
  }

  /** Append one product to the current store's list. */
  addProduct(product) {
    const currentValue = this.current_store_product$.getValue();
    this.current_store_product$.next([...currentValue, product]);
  }

  /**
   * Remove the product whose _id matches productRemove._id.
   * Fixed: when findIndex returned -1, splice(-1, 1) silently deleted the
   * LAST product in the list. Now a no-op when not found.
   */
  removeProduct(productRemove) {
    const products = this.current_store_product$.getValue();
    const index = products.findIndex(
      (product) => product._id === productRemove._id
    );
    if (index === -1) {
      return; // unknown product — nothing to remove
    }
    products.splice(index, 1);
    this.current_store_product$.next([...products]);
  }

  /** Clear the current store's product list. */
  resetStoreProducts() {
    return this.current_store_product$.next([]);
  }
}
<file_sep>import { Component, OnInit } from "@angular/core";
import { FormGroup, FormControl } from "@angular/forms";
import { Router } from "@angular/router";
import { OrdersFacadeService } from "src/app/core-modules/services/user/orders/orders-facade/orders-facade.service";
@Component({
  selector: "app-delivery-verification",
  templateUrl: "./delivery-verification.page.html",
  styleUrls: ["./delivery-verification.page.scss"],
})
export class DeliveryVerificationPage implements OnInit {
  // Captures the courier reference plus the one-time PIN for verification.
  verifyReviewForm = new FormGroup({
    courier_ref: new FormControl(""),
    courier_OTP: new FormControl(""),
  });

  constructor(
    private _orderFacade: OrdersFacadeService,
    private router: Router
  ) {}

  ngOnInit() {}

  /** Submit the verification details, then return to the user home page. */
  verifyDelivery() {
    const { value } = this.verifyReviewForm;
    this._orderFacade.verifyOrderDelivery(value);
    this.router.navigate(["/user/landing/home"]);
  }
}
<file_sep>const mongoose = require("mongoose");
// Notification schema: one in-app notification addressed to a store user.
const Schema = mongoose.Schema;
const ActivitySchema = new Schema({
  // Whether the recipient has already seen this notification.
  viewed: {
    type: Boolean,
  },
  // Store this notification belongs to.
  store_id: {
    type: String,
    required: true,
  },
  // Optional order payload the notification refers to.
  order: {
    type: Object,
  },
  // Notification category.
  type: {
    type: String,
    required: true,
  },
  title: {
    type: String,
    required: true,
  },
  // Human-readable notification body.
  msg: {
    type: String,
    required: true,
  },
  // Recipient's email address.
  user_email: {
    type: String,
    required: true,
  },
  // Creation timestamp.
  date: {
    type: Date,
    default: Date.now,
  },
});
// Notification model (the old "// User model" comment was wrong)
const Notification = mongoose.model("notification", ActivitySchema);
module.exports = Notification;
<file_sep>import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { ShopByDepartmentComponent} from "./shop-by-department/shop-by-department.component";
import { TopSalesComponent} from "./top-sales/top-sales.component";
import { PromoSlidesComponent } from "./promo-slides/promo-slides.component";
import { SalesComponent} from './sales/sales.component'
import { AdSpaceComponent } from "./ad-space/ad-space.component";
import {MostLikedComponent} from "./most-liked/most-liked.component";
import { PromotionsComponent} from "./promotions/promotions.component"
import {UserLikedComponent } from "./user-liked/user-liked.component"
// Declares and re-exports the building-block components of the home page
// (department shortcuts, sales/promo sliders, ad space, liked products).
@NgModule({
  declarations: [
    ShopByDepartmentComponent,
    TopSalesComponent,
    PromoSlidesComponent,
    SalesComponent,
    AdSpaceComponent,
    MostLikedComponent,
    PromotionsComponent,
    UserLikedComponent
  ],
  imports: [
    CommonModule,
  ],
  // Every declared component is exported for use by the host page.
  exports: [
    ShopByDepartmentComponent,
    TopSalesComponent,
    PromoSlidesComponent,
    SalesComponent,
    AdSpaceComponent,
    MostLikedComponent,
    PromotionsComponent,
    UserLikedComponent
  ]
})
export class HomeComponentsModule { }
<file_sep>import { Component, OnInit } from '@angular/core';
import { ToastController } from '@ionic/angular';
import { UserFacadeService } from 'src/app/core-modules/services/profile/profile-facade/profile-facade.service';
import {BreakpointObserver, Breakpoints} from '@angular/cdk/layout';
@Component({
  selector: 'app-users',
  templateUrl: './users.page.html',
  styleUrls: ['./users.page.scss'],
})
export class UsersPage implements OnInit {
  // Segment currently active in the template.
  public segmentChanged = "users";
  // Admin users of the current store.
  public users;
  // "Handset" | "Tablet" | "Web" — drives responsive layout.
  device_screen;
  filter;

  constructor(
    private userFacade: UserFacadeService,
    public toastController: ToastController,
    breakpointObserver: BreakpointObserver,
  ) {
    // Track the device class; when multiple breakpoints match, the last
    // emission wins (same as the original three subscriptions).
    breakpointObserver.observe([Breakpoints.Handset]).subscribe((result) => {
      if (result.matches) {
        this.device_screen = "Handset";
      }
    });
    breakpointObserver.observe([Breakpoints.Tablet]).subscribe((result) => {
      if (result.matches) {
        this.device_screen = "Tablet";
      }
    });
    breakpointObserver.observe([Breakpoints.Web]).subscribe((result) => {
      if (result.matches) {
        this.device_screen = "Web";
      }
    });
  }

  ngOnInit() {
    this.filter = 'Default';
    this.getUsers();
  }

  /**
   * Load the users of the current store; fall back to the user's default
   * store when no store has been selected yet.
   * NOTE(review): getCurrentStore() is read immediately after
   * loadStoreProfile() — this presumably relies on the profile being
   * available synchronously or cached; confirm against UserFacadeService.
   */
  getUsers() {
    const store = this.userFacade.getCurrentStore();
    if (Object.keys(store).length === 0) {
      this.userFacade.getUser$().subscribe(
        (res) => {
          this.userFacade.loadStoreProfile(res.store_id);
          const default_store = this.userFacade.getCurrentStore();
          this.users = default_store.users;
        },
        (err) => {
          console.log(err);
        }
      );
    } else {
      this.userFacade.loadStoreProfile(store._id);
      const default_store = this.userFacade.getCurrentStore();
      this.users = default_store.users;
    }
  }

  /**
   * Switch segments only when the store's license allows it: at least one
   * license must have free seats AND not be expired.
   * Fixed: the flags were previously recomputed and the segment reassigned
   * on every iteration of a forEach loop; Array.some short-circuits.
   * ("lisence"/"payed_amount" spellings are the stored data keys.)
   */
  changeSegment(data) {
    const store = this.userFacade.getCurrentStore();
    const store_users = store.users.length;
    const today_ms = Date.now();
    const one_day_ms = 86400000; // milliseconds per day
    const allow_access = store.lisence.some((lisence) => {
      const time_remaining =
        (lisence.payed_amount.end_date - today_ms) / one_day_ms;
      return store_users < lisence.number_of_users && time_remaining > 0;
    });
    if (allow_access) {
      this.segmentChanged = data;
    } else {
      this.presentToast("Buy license to add more users");
    }
  }

  /** Show a short toast message. */
  async presentToast(msg) {
    const toast = await this.toastController.create({
      message: msg,
      duration: 2500,
    });
    toast.present();
  }
}
<file_sep>import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { StoresListComponent} from './stores-list/stores-list.component';
// Wraps the store-list component so feature pages can import it as a unit.
@NgModule({
  declarations: [
    StoresListComponent,
  ],
  imports: [
    CommonModule
  ],
  exports: [
    StoresListComponent
  ]
})
export class StoresModule { }
<file_sep>import { Component, OnInit, Output, Input, EventEmitter, OnChanges } from "@angular/core";
import { UserFacadeService } from "src/app/core-modules/services/profile/profile-facade/profile-facade.service";
import {
FormGroup,
FormBuilder,
} from "@angular/forms";
import { distinctUntilChanged } from 'rxjs/operators';
import { ToastController } from '@ionic/angular';
import {BreakpointObserver, Breakpoints} from '@angular/cdk/layout';
@Component({
  selector: "app-admin-users",
  templateUrl: "./admin-users.component.html",
  styleUrls: ["./admin-users.component.scss"],
})
export class AdminUsersComponent implements OnInit, OnChanges {
  /** Notifies the parent page of a segment-change request. */
  @Output() segment = new EventEmitter();
  public no_user;
  // Local, mutable copy of the users passed in via users$.
  public users;
  @Input() users$;
  public store;
  // Form backing the user edit panel.
  updateUser: FormGroup;
  // Module-access list of the user currently being edited.
  modules;
  // Email of the currently expanded user (avoids re-patching the form).
  email;
  // Whether the edit panel is active.
  edit;
  // "Handset" | "Tablet" | "Web" — drives responsive layout.
  device_screen;
  // User shown in the desktop detail pane.
  desktopViewUser;
  expand_detail;

  constructor(
    private userFacade: UserFacadeService,
    private formBuilder: FormBuilder,
    breakpointObserver: BreakpointObserver,
    public toastController: ToastController
  ) {
    // When several breakpoints match, the last emission wins.
    breakpointObserver.observe([Breakpoints.Handset]).subscribe((result) => {
      if (result.matches) {
        this.device_screen = "Handset";
      }
    });
    breakpointObserver.observe([Breakpoints.Tablet]).subscribe((result) => {
      if (result.matches) {
        this.device_screen = "Tablet";
      }
    });
    breakpointObserver.observe([Breakpoints.Web]).subscribe((result) => {
      if (result.matches) {
        this.device_screen = "Web";
      }
    });
  }

  ngOnChanges() {
    // Keep the local copy in sync with the @Input.
    this.users = this.users$;
  }

  ngOnInit() {
    // Guard: the input may not be bound yet; previously `this.users[0]`
    // threw when users$ was undefined.
    this.users = this.users$ || [];
    this.desktopViewUser = this.users[0];
    this.no_user = false;
    // Initiate the edit form.
    this.updateUser = this.formBuilder.group({
      //create instances of form-control
      email: [""],
      user_type: [""],
      role: [""],
    });
  }

  /** Expand a user row and load its data into the edit form (once). */
  expanded(v) {
    const user = { ...v };
    if (this.email == user.email) {
      return null; // already expanded — don't re-patch the form
    }
    this.email = user.email;
    this.modules = user.module_access;
    this.updateUser.patchValue({
      email: user.email,
      user_type: user.user_type,
      role: user.role,
    });
  }

  /** ion-select change handler for the module-access list. */
  onChange(event) {
    this.modules = event.detail.value;
  }

  /** Remove one module from the module-access list being edited. */
  deleteModule(data) {
    this.modules = this.modules.filter((module) => module !== data);
  }

  /** Enter edit mode unless the target is the store's main user. */
  editUser(admin) {
    if (admin.user_type == "Main") {
      this.presentToast("You cannot modify main user");
      this.edit = false;
    } else {
      this.edit = true;
    }
  }

  /** Apply the form values to the user list and persist the whole list. */
  update(admin) {
    const store = this.userFacade.getCurrentStore();
    this.edit = false;
    const user = { ...this.updateUser.value, module_access: this.modules };
    // Replace the edited user (matched by email) in the local list.
    const other_users = this.users.filter((fs) => fs.email !== user.email);
    const users = [...other_users, user];
    this.users = users;
    // Persist to the database.
    this.userFacade.updateAdmin({
      store_id: store._id,
      users: users,
    });
  }

  /** Delete a user locally and remotely; the main user is protected. */
  onDelete(admin) {
    const data = this.users.filter((user) => user !== admin);
    if (admin.user_type == "Main") {
      this.presentToast("You cannot delete main user");
    } else {
      this.users = data;
      this.userFacade.removeAdmin(admin);
    }
  }

  changeSegment(data) {
    this.segment.emit(data);
  }

  /** Show a short toast message. */
  async presentToast(data) {
    const toast = await this.toastController.create({
      message: data,
      duration: 2000,
      position: "bottom",
    });
    toast.present();
  }

  /** Select a user for the desktop detail pane and enter edit mode. */
  setDesktopUserView(user) {
    this.desktopViewUser = user;
    this.edit = true;
  }

  /** Toggle the detail expansion (simplified from the if/else toggle). */
  expand(event) {
    this.expand_detail = !this.expand_detail;
  }
}
<file_sep>import { TestBed } from "@angular/core/testing";
import { ProfileFacadeService } from "./profile-facade.service";
// Smoke test: the ProfileFacadeService can be created by Angular DI.
describe("ProfileFacadeService", () => {
  let service: ProfileFacadeService;
  beforeEach(() => {
    // Empty testing module — presumably the service is provided in root.
    TestBed.configureTestingModule({});
    service = TestBed.inject(ProfileFacadeService);
  });
  it("should be created", () => {
    expect(service).toBeTruthy();
  });
});
<file_sep>const nodemailer = require("nodemailer");
const jwt = require('jwt-simple');
const keys = require("../../../config/users/keys");
const smtp = require("../../config/email/vars");
const frontHost = require("../../config/front/server");
module.exports = function emailAuth(user, req, res) {
//Send verification Email
const smtpTransport = nodemailer.createTransport({
host: smtp.server.host,
port: smtp.server.port,
auth: {
user: smtp.server.auth.user,
pass: smtp.server.auth.pass
}
});
// Create JWT Payload
const payload = {
id: user.id,
name: user.name,
email: user.email
};
// create a unique secret key!
const secret = user.password + '-' + user.date + keys.secretOrKey;
const token = jwt.encode(payload, secret);
// Prepare Email
let mailOptions, host, link;
host = req.get('host');
link = frontHost.server + 'user/reset-password/?id=' + payload.id + '&token=' + token;
mailOptions = {
to : user.email,
subject : "Reset Password",
html : "Hello,<br> Please Click on the link to reset your password.<br><a href="+link+">Click here to reset Password</a>"
}
// Send email containing link to reset password.
smtpTransport.sendMail(mailOptions, function(error, response){
if(error){
res.json({
message: "Oops something went wrong while sending",
err: error
});
}else{
res.json({
message: "Please check your email to reset your password",
user:user
})
}
});
}
<file_sep>import { Component, OnInit } from "@angular/core";
import { Md5 } from "ts-md5/dist/md5";
import { PayfastFacadeService } from "src/app/core-modules/services/payments/payfast-facade/payfast-facade.service";
@Component({
  selector: "app-payfast",
  templateUrl: "./payfast.component.html",
  styleUrls: ["./payfast.component.scss"],
})
export class PayfastComponent implements OnInit {
  // MD5 helper — presumably used by the template for signing; confirm.
  md5 = new Md5();
  // Payfast form model supplied by the facade.
  payfastForm: any = {};

  constructor(private payfastFacade: PayfastFacadeService) {}

  ngOnInit() {
    this.payfastForm = this.payfastFacade.getPayfastForm();
  }

  /** Stash the order so the payment-success callback can recover it. */
  tempCart(order_info) {
    this.payfastFacade.temporaryOrder(order_info, null);
  }
}
<file_sep>import { Injectable } from "@angular/core";
import { BehaviorSubject } from "rxjs";
@Injectable({
  providedIn: "root",
})
export class ActivityStateService {
  // True while an activity request is in flight.
  private updating$ = new BehaviorSubject<boolean>(false);
  // Store activity feed.
  private activity$ = new BehaviorSubject<any>([]);
  // Notification feed.
  private notification$ = new BehaviorSubject<any>([]);
  // Store-scoped boolean signal.
  private store = new BehaviorSubject<boolean>(false);

  constructor() {}

  /** Observable of the in-flight flag. */
  isUpdating$() {
    return this.updating$.asObservable();
  }

  /** Change the in-flight flag. */
  setUpdating(isUpdating: boolean) {
    this.updating$.next(isUpdating);
  }

  /** Observable of the activity feed. */
  getActivities$() {
    return this.activity$.asObservable();
  }

  /** Replace the whole activity feed. */
  setActivity(activity) {
    this.activity$.next(activity);
  }

  /** Snapshot of the store signal flag. */
  getStoreSignal() {
    return this.store.getValue();
  }

  /** Set the store signal flag. */
  setStoreSignal(value) {
    this.store.next(value);
  }

  /** Append one activity to the feed. */
  addActivity(activity) {
    const currentValue = this.activity$.getValue();
    this.activity$.next([...currentValue, activity]);
  }

  /** Remove a specific activity (matched by reference). */
  removeActivity(activityRemove) {
    const currentValue = this.activity$.getValue();
    this.activity$.next(
      currentValue.filter((activity) => activity !== activityRemove)
    );
  }

  ////*** notifications ***/ */

  /** Replace the notification feed (emits an empty list first as a reset). */
  setNotification(activity) {
    this.notification$.next([]);
    this.notification$.next(activity);
  }

  /** Observable of the notification feed. */
  getNotifications$() {
    return this.notification$.asObservable();
  }

  /**
   * Append one notification to the notification feed.
   * Fixed: the current value was previously read from activity$ (copy-paste
   * bug), which replaced the notification list with the activity list plus
   * the new entry.
   */
  addNotification(activity) {
    const currentValue = this.notification$.getValue();
    this.notification$.next([...currentValue, activity]);
  }
}
<file_sep>import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { FormsModule } from '@angular/forms';
import { IonicModule } from '@ionic/angular';
import { StoreSelectionsPageRoutingModule } from './store-selections-routing.module';
import { StoreSelectionsPage } from './store-selections.page';
import { StoresModule } from './stores/stores.module';
// Feature module for the store-selection page.
@NgModule({
  imports: [
    CommonModule,
    FormsModule,
    IonicModule,
    StoresModule ,
    StoreSelectionsPageRoutingModule,
  ],
  declarations: [StoreSelectionsPage]
})
export class StoreSelectionsPageModule {}
<file_sep>import { Injectable } from "@angular/core";
import { BehaviorSubject, Observable } from "rxjs";
@Injectable({
  providedIn: "root",
})
export class ReceiptStateService {
  // True while a receipt request is in flight.
  private updating$ = new BehaviorSubject<boolean>(false);
  // NOTE(review): initialized to {} but updateReceipt treats the value as
  // an array — presumably setReceipt always supplies an array first; confirm.
  private receipt$ = new BehaviorSubject<any>({});

  constructor() {}

  /** Observable of the in-flight flag. */
  isUpdating$() {
    return this.updating$.asObservable();
  }

  /** Change the in-flight flag. */
  setUpdating(isUpdating: boolean) {
    this.updating$.next(isUpdating);
  }

  /** Observable of the receipt state. */
  getReceipts$() {
    return this.receipt$.asObservable();
  }

  /** Replace the receipt state. */
  setReceipt(receipt) {
    this.receipt$.next(receipt);
  }

  /**
   * Replace the receipt whose id matches updatedReceipt.id.
   * Fixed: the findIndex callback was missing its `return`, so it always
   * yielded -1 and the update was written to a stray "-1" property.
   * Now a no-op when the receipt is not found.
   */
  updateReceipt(updatedReceipt) {
    const receipts = this.receipt$.getValue();
    const indexOfUpdated = receipts.findIndex(
      (receipt) => receipt.id === updatedReceipt.id
    );
    if (indexOfUpdated === -1) {
      return;
    }
    receipts[indexOfUpdated] = updatedReceipt;
    this.receipt$.next([...receipts]);
  }
}
<file_sep>import { Injectable } from "@angular/core";
import { OrdersApiService } from "../orders-api/orders-api.service";
import { OrdersStateService } from "../orders-state/orders-state.service";
@Injectable({
  providedIn: "root",
})
export class OrdersFacadeService {
  /**
   * Facade over the orders API and the orders state: loads orders into the
   * state and pushes status updates back to the server.
   */
  constructor(
    private orderState: OrdersStateService,
    private orderApi: OrdersApiService
  ) {}

  /** Fetch a store's orders and publish them to the state. */
  loadStoreOrdersById(id) {
    this.orderApi.getStoreOrdersById({ id: id }).subscribe(
      (res) => {
        this.orderState.setOrders(res.orders);
      },
      (err) => {
        console.log(err);
      }
    );
  }

  /** Observable of the orders state. */
  getStoreOrders$() {
    return this.orderState.getOrders$();
  }

  /**
   * Update an order's status, publish the result to the state, and return
   * the request observable.
   * Fixed: a SECOND identical request observable was previously created
   * just for the return value. NOTE(review): the returned observable is
   * cold — subscribing to it triggers another HTTP request; confirm how
   * callers use the return value.
   */
  updateStoreAdminOrder(data) {
    const update$ = this.orderApi.updateOrderStatus(data);
    update$.subscribe(
      (res) => {
        this.orderState.updateOrder(res.order);
      },
      (err) => {
        // TODO: roll back the state update on failure
        console.log(err);
      }
    );
    return update$;
  }

  /** Optimistically update the state, then persist the status change. */
  updateStoreOrder(data) {
    // update order state first (optimistic)
    this.orderState.updateOrder(data.data);
    // then persist; the server response re-syncs the state
    this.orderApi.updateOrderStatus(data).subscribe(
      (res) => {
        this.orderState.updateOrder(res.order);
      },
      (err) => {
        // TODO: roll back the optimistic update on failure
        console.log(err);
      }
    );
  }

  /** Observable of the order selected for delivery. */
  getOrderDelivery() {
    return this.orderState.getOrderDelivery$();
  }

  /** Select the order to be delivered. */
  setOrderDelivery(order) {
    this.orderState.setOrderDelivery$(order);
  }

  /** Fire-and-forget update of a return action; result is only logged. */
  returnActionUpdate(data) {
    this.orderApi.returnActionUpdate({ data: data }).subscribe(
      (res) => {
        console.log(res);
      },
      (err) => {
        console.log(err);
      }
    );
  }
}
<file_sep>const express = require("express");
const keys = require("../../../config/users/keys");
const jwt = require('jwt-simple');
const router = express.Router();
// Access Validations
const validateEmailInput = require("../../../validation/authenticate/userValidation/forgotPassword");
const sendPasswordLink = require("../../../validation/authenticate/emailAuth/sendPassword");
const resetValidation =require("../../../validation/authenticate/userValidation/resetPasswordValidation");
const updatePassword =require("../../../validation/authenticate/userHashing/updatePassword");
// Access Models
const User = require("../../../models/users/User");
// Email Reset Link
// Email a password-reset link to the given address.
router.post("/resetLink", (req, res) => {
  const { errors, isValid } = validateEmailInput(req.body);
  if (!isValid) {
    return res.status(400).json(errors);
  }
  User.findOne({ email: req.body.email })
    .then((user) => {
      // findOne resolves with null (it does not reject) for an unknown
      // email — previously this crashed inside sendPasswordLink and the
      // request was left hanging with no response.
      if (!user) {
        return res.status(404).json({ message: "Email is not registered" });
      }
      sendPasswordLink(user, req, res);
    })
    .catch((err) => console.log(err, "Email is not registered"));
});

// Validate the reset token and store the new password.
router.post("/resetPassword", function (req, res) {
  const { errors, isValid } = resetValidation(req.body);
  if (!isValid) {
    return res.status(400).json(errors);
  }
  User.findOne({ _id: req.body.id })
    .then((user) => {
      // Same null-result guard as above.
      if (!user) {
        return res.status(404).json({ message: "User not found" });
      }
      // The secret embeds the old password hash and signup date, so the
      // token is invalidated as soon as the password changes.
      const secret = user.password + "-" + user.date + keys.secretOrKey;
      const payload = jwt.decode(req.body.token, secret);
      if (payload) {
        // Token valid — hash and persist the new password.
        updatePassword(user, req.body, req, res);
      } else {
        res.json({ message: "Token Invalid" });
      }
    })
    .catch((err) => res.json({ message: "Something wrong with the token" }));
});

module.exports = router;
<file_sep>import { Injectable } from "@angular/core";
@Injectable({
  providedIn: "root",
})
export class ProductOperatorService {
  constructor() {}

  /**
   * Build the request params used to fetch products similar to `product`.
   * Fixed: the category object is now copied before being tagged with the
   * product id — the caller's product is no longer mutated. Debug
   * console.log removed.
   */
  getSimilarProductsParams(product) {
    const category = { ...product.category, product_id: product._id };
    return {
      value: category,
      signal: "similar_products",
    };
  }

  /** Persist the detail product to localStorage (stringified). */
  updateLocalStorageDetailProduct(product) {
    localStorage.setItem("details_product", JSON.stringify(product));
    return;
  }

  /**
   * Prefer the locally-cached detail product when one exists; otherwise
   * fall back to the product loaded from the database.
   */
  localStorageProductConfig(product) {
    const cached = JSON.parse(localStorage.getItem("details_product"));
    return cached ? cached : product;
  }
}
<file_sep>import { Component, OnInit } from "@angular/core";
import { ActivityFacadeService } from "src/app/core-modules/services/activities/activity-facade/activity-facade.service";
import { UserFacadeService } from "src/app/core-modules/services/profile/profile-facade/profile-facade.service";
@Component({
  selector: "app-activity",
  templateUrl: "./activity.component.html",
  styleUrls: ["./activity.component.scss"],
})
export class ActivityComponent implements OnInit {
  // True when the feed came back empty (drives the empty-state UI).
  public no_Activity;
  // CSS hook toggled by setRead().
  public read;
  // Activity feed observable exposed to the template.
  public activities$;

  constructor(
    private userFacade: UserFacadeService,
    private activityFacade: ActivityFacadeService
  ) {}

  ngOnInit() {
    // Load the current store's activities, then keep the observable around
    // for the template.
    const store = this.userFacade.getCurrentStore();
    this.activityFacade.loadActivities(store._id);
    this.activities$ = this.activityFacade.getActivities$();
    this.activities$.subscribe(
      (activities) => {
        // Flag the empty state when nothing came back.
        if (activities == undefined || activities.length < 1) {
          this.no_Activity = true;
        }
      },
      (error) => {
        console.log(error);
      }
    );
    this.read = "not_read";
    this.no_Activity = false;
  }

  /** Mark the feed as read (CSS class swap). */
  setRead() {
    this.read = "read_class";
  }

  /** Push a sample activity entry to the store's feed. */
  pushActivity() {
    const notify = {
      type: "Activity",
      title: "admin deleted a product",
      msg: "product two from Shoes Categories",
      user_ini: "TM",
      user_name: "<NAME>",
    };
    this.activityFacade.pushActivity({ activity: notify });
  }
}
<file_sep>const mongoose = require("mongoose");
// Product schema (the old "// User Schema" comment was wrong)
const Schema = mongoose.Schema;
const ProductSchema = new Schema({
  // Owning store.
  storeId: {
    type: String,
    required: true,
  },
  productName: {
    type: String,
    required: true,
  },
  category: {
    type: Object,
    required: true,
  },
  productDescription: {
    type: String,
    required: true,
  },
  // One or more image references.
  productImage: {
    type: Array,
    required: true,
  },
  // Search keywords.
  metaTags: {
    type: Array,
    required: true,
  },
  features: {
    type: Array,
  },
  bill_of_material: {
    type: Object,
  },
  // Base price.
  productPrice: {
    type: Number,
    required: true,
  },
  // Price after adjustment (sale/promo), when set.
  modified_price: {
    type: Number,
  },
  amount: {
    type: Number,
    default: 1,
  },
  // Promotion flags/details.
  promo: {
    type: Boolean,
    required: false,
  },
  promo_type: {
    type: Array,
    required: false,
  },
  sale: {
    type: Boolean,
    required: false,
  },
  production_status: {
    type: String,
    default: "Active",
  },
  // Inventory counters.
  in_stock: {
    type: Number,
    default: 0,
  },
  sold: {
    type: Number,
    default: 0,
  },
  next_available_date: {
    type: Date,
    default: Date.now,
  },
  sale_type: {
    type: Array,
    required: false,
  },
  // User engagement.
  likes: {
    type: Array,
    default: [],
  },
  ratings: {
    type: Array,
    default: [],
  },
  license: {
    type: Array,
  },
  // Creation timestamp.
  date: {
    type: Date,
    default: Date.now,
  },
});
// Product model
const Product = mongoose.model("products", ProductSchema);
module.exports = Product;
<file_sep>import { Component, Input, OnInit } from "@angular/core";
import { ProductApiService } from "src/app/core-modules/services/products/product-api/product-api.service";
@Component({
  selector: "app-product-view-modal",
  templateUrl: "./product-view-modal.component.html",
  styleUrls: ["./product-view-modal.component.scss"],
})
export class ProductViewModalComponent implements OnInit {
  // Product payload supplied by the modal opener.
  @Input() data: any;

  constructor(private productApi: ProductApiService) {}

  ngOnInit() {
    console.log(this.data);
  }

  /** Placeholder for the image-replacement flow (not implemented yet). */
  replacePicture() {
    //upload new image
    //update product
    //delete cloudinary image
    //this.deleteCloudImage(Image_data);
  }

  /** Ask the API to delete a Cloudinary image; the result is only logged. */
  deleteCloudImage(data) {
    this.productApi.deleteCloudImage(data).subscribe(
      (response) => console.log(response),
      (error) => console.log(error)
    );
  }
}
<file_sep>import {
Component,
OnInit,
Output,
Input,
EventEmitter,
SimpleChanges,
OnChanges,
} from "@angular/core";
import { FormGroup, FormBuilder } from "@angular/forms";
import { PickerController } from "@ionic/angular";
import { OrderFacadeService } from "src/app/core-modules/services/orders/order-facade/order-facade.service";
import { UserFacadeService } from "src/app/core-modules/services/profile/profile-facade/profile-facade.service";
//Interfaces
import { PickerOptions } from "src/app/models/picker-model";
import { distinctUntilChanged } from "rxjs/operators";
import {BreakpointObserver, Breakpoints} from '@angular/cdk/layout';
import { HeaderStateService } from "src/app/core-modules/services/header/header-state/header-state.service";
/*import { PickerButton } from 'src/app/models/picker-model';
import { PickerColumn } from 'src/app/models/picker-model';
import { PickerColumnOption } from 'src/app/models/picker-model';*/
@Component({
  selector: "app-orders",
  templateUrl: "./orders.component.html",
  styleUrls: ["./orders.component.scss"],
})
export class OrdersComponent implements OnInit, OnChanges {
  // Emits view-change requests (e.g. "deliver") to the parent page.
  @Output() notifyParent: EventEmitter<any> = new EventEmitter();
  // Product status chosen in the picker; defaults to "pending".
  public framework = "pending";
  public views_order = [];
  // Identifier of the currently expanded order row.
  public active_expansion;
  // Last result of filtering the order list by id (see checkProductReady).
  public filtered_array;
  public panelOpenState = false;
  clickButton;
  public commit_date;
  // Order shown in the desktop detail pane.
  public desktopViewOrder;
  public commit_chip = true;
  //observables
  orderList$;
  isUpdating$;
  no_order;
  // "Handset" | "Tablet" | "Web" — drives responsive layout.
  device_screen
  expand_detail
  detail_view_product
  // Order pre-selected from the dashboard (via HeaderStateService).
  dash_selected_order
  //public date: any = new Date().toISOString();
  public date_form: FormGroup;
  // NOTE(review): treated as an Observable of orders (it is piped in
  // ngOnChanges) — confirm against the parent template binding.
  @Input() orders;
  constructor(
    private formBuilder: FormBuilder,
    private pickerCtrl: PickerController,
    private orderFacade: OrderFacadeService,
    private userFacade: UserFacadeService,
    breakpointObserver: BreakpointObserver,
    private headerStateService: HeaderStateService,
  ) {
    ////loading
    this.isUpdating$ = this.orderFacade.isUpdating$();
    // Track the device class; when several breakpoints match, the last
    // emission wins.
    breakpointObserver.observe([
      Breakpoints.Handset
    ]).subscribe(result => {
      if (result.matches) {
        this.device_screen = "Handset"
      }
    });
    breakpointObserver.observe([
      Breakpoints.Tablet
    ]).subscribe(result => {
      if (result.matches) {
        this.device_screen = "Tablet"
      }
    });
    breakpointObserver.observe([
      Breakpoints.Web
    ]).subscribe(result => {
      if (result.matches) {
        this.device_screen = "Web"
      }
    });
    // Remember the order selected on the dashboard so it can be shown in
    // the detail pane when the list loads.
    this.headerStateService.getDashActiveOrderSelect().pipe(distinctUntilChanged()).subscribe(
      (res)=>{
        this.dash_selected_order = res
        console.log(res,"dah")
      }
    )
  }
  // Re-wrap the input stream whenever the parent rebinds it.
  ngOnChanges(changes: SimpleChanges) {
    this.orderList$ = this.orders.pipe(distinctUntilChanged());
  }
  ngOnInit() {
    this.date_form = this.formBuilder.group({
      date: [new Date().toISOString()],
    });
    // Pick the initial detail-pane order: the dashboard selection when one
    // exists, otherwise the first order in the list.
    this.orderList$.subscribe(
      (res) => {
        if(this.dash_selected_order._id){
          this.desktopViewOrder = this.dash_selected_order
        }else{
          this.desktopViewOrder = res[0]
        }
        if (res == undefined || res.length < 1) {
          this.no_order = true;
        }
      },
      (err) => {
        console.log(err);
      }
    );
  }
  // Show a status picker for one order item and persist the choice on
  // dismiss.
  async showBasicPicker(item, order_id, order_number, commit_date) {
    let opts: PickerOptions = {
      buttons: [
        {
          text: "cancel",
          role: "cancel",
        },
        {
          text: "done",
        },
      ],
      columns: [
        {
          name: "framework",
          options: [
            { text: "ready", value: "ready" },
            { text: "pending", value: "pending" },
            { text: "preparing", value: "preparing" },
          ],
        },
      ],
    };
    let picker = await this.pickerCtrl.create(opts);
    picker.present();
    picker.onDidDismiss().then(async (data) => {
      let col = await picker.getColumn("framework");
      this.framework = col.options[col.selectedIndex].text;
      this.postToUpdateOrder(
        item,
        order_id,
        order_number,
        commit_date,
        this.framework
      );
    });
  }
  // Persist an item-level status change for the current store's order.
  postToUpdateOrder(item, order_id, order_number, commit_date, product_status) {
    let store = this.userFacade.getCurrentStore();
    let data = {
      order_id: order_id,
      item: item,
      product_status: product_status,
      order_number: order_number,
    };
    this.orderFacade.updateStoreOrder({ data: data, store_id: store._id });
  }
  // Persist a commit date for a whole order.
  commit(order_id, order_number, commit_date) {
    let store = this.userFacade.getCurrentStore();
    let data = {
      _id: order_id,
      order_id: order_id,
      commit_date: commit_date,
      order_number: order_number,
    };
    this.orderFacade.updateStoreOrder({ data: data, store_id: store._id });
  }
  // Toggle the expansion state of an order row.
  expanded(order_number) {
    if (this.active_expansion === order_number) {
      this.active_expansion = null;
    } else {
      this.active_expansion = order_number;
    }
  }
  // True when every item of the given order is past "pending"/"preparing".
  // NOTE(review): this reads this.filtered_array right after subscribing —
  // it only works if orderList$ emits synchronously (BehaviorSubject-
  // backed); confirm against OrderFacadeService before refactoring.
  checkProductReady(order_id) {
    this.orderList$.subscribe(
      (res) => {
        let filtered_array = res.filter((item) => {
          return item._id === order_id;
        });
        this.filtered_array = filtered_array;
      },
      (err) => {
        console.log(err);
      }
    );
    let stat = [];
    this.filtered_array[0].items.forEach((z) => {
      stat.push(z.order_status);
    });
    if (stat.includes("pending")) {
      return false;
    } else if (stat.includes("preparing")) {
      return false;
    } else {
      return true;
    }
  }
  // Whole days from today until the commit date (computed at UTC
  // midnights, so the time of day is ignored); negative when overdue.
  countDown(commit_date) {
    let dateSent = new Date();
    let currentDate = new Date(commit_date);
    return Math.floor(
      (Date.UTC(
        currentDate.getFullYear(),
        currentDate.getMonth(),
        currentDate.getDate()
      ) -
        Date.UTC(
          dateSent.getFullYear(),
          dateSent.getMonth(),
          dateSent.getDate()
        )) /
      (1000 * 60 * 60 * 24)
    );
  }
  // Select an order for delivery and ask the parent to switch views.
  deliver(order) {
    this.orderFacade.setOrderDelivery(order);
    this.notifyParent.emit("deliver");
  }
  onCommit() {
    this.clickButton = true;
    this.commit_chip = false;
  }
  orderClicked(id) {
    console.log(id);
  }
  openOrder(id) {
    this.clickButton = false;
    this.active_expansion = id;
    console.log(this.active_expansion);
  }
  checkId(id) {
    if (this.panelOpenState === true) {
      return this.active_expansion === id;
    }
  }
  // Select the order shown in the desktop detail pane.
  setDesktopOrderView( order){
    this.desktopViewOrder = order
    console.log(order)
  }
  // Toggle the detail expansion and close any open product view.
  expand(event){
    if(this.expand_detail){
      this.expand_detail = false
    }else{
      this.expand_detail = true
    }
    this.detail_view_product =false
  }
  viewProduct(item){
    this.detail_view_product = item
    console.log(item)
  }
}
<file_sep>import { Component, OnInit, ViewChild } from "@angular/core";
import { IonSlides } from "@ionic/angular";
import { Router } from "@angular/router";
import { ToastController } from "@ionic/angular";
import { AlertController } from "@ionic/angular";
import { FormGroup, FormControl } from "@angular/forms";
import { OrderFacadeService } from "src/app/core-modules/services/orders/order-facade/order-facade.service";
import { UserFacadeService } from "src/app/core-modules/services/profile/profile-facade/profile-facade.service";
import {BreakpointObserver, Breakpoints} from '@angular/cdk/layout';
// Deliver flow: a slider-driven wizard for choosing how an order gets
// delivered (self delivery vs. third-party courier).
@Component({
  selector: "app-deliver",
  templateUrl: "./deliver.component.html",
  styleUrls: ["./deliver.component.scss"],
})
export class DeliverComponent implements OnInit {
  // True while "self delivery" is the selected method (default).
  public self_delivery = true;
  // Date chosen in the delivery-date picker.
  public delivery_date;
  // Set to the current date on init; presumably the picker's minimum — TODO confirm in template.
  public today;
  // "Handset" | "Tablet" | "Web" — set by the breakpoint observers below.
  device_screen;
  // Visibility of the slider's back button.
  showBack ;
  // 1-based index of the active slide.
  slides_number=1
  // Visibility of the slider's next button.
  showNext
  // Courier details captured on the third-party path.
  courier_delivery_form = new FormGroup({
    courier_name: new FormControl(""),
    courier_ref: new FormControl(""),
  });
  // Handle on the ion-slides element driving the wizard.
  @ViewChild("deliverSlider") slides: IonSlides;
  constructor(
    public toastController: ToastController,
    public alertController: AlertController,
    private orderFacade: OrderFacadeService,
    private router: Router,
    private userFacade: UserFacadeService,
    breakpointObserver: BreakpointObserver,
  ) {
    ////loading
    // Track the device class. Each observer fires independently, so if
    // several breakpoints match, the last emission wins.
    breakpointObserver.observe([
      Breakpoints.Handset
    ]).subscribe(result => {
      if (result.matches) {
        this.device_screen = "Handset"
      }
    });
    breakpointObserver.observe([
      Breakpoints.Tablet
    ]).subscribe(result => {
      if (result.matches) {
        this.device_screen = "Tablet"
      }
    });
    breakpointObserver.observe([
      Breakpoints.Web
    ]).subscribe(result => {
      if (result.matches) {
        this.device_screen = "Web"
      }
    });
  }
  ngOnInit() {
    // Initial wizard state: on the first slide only "next" is available.
    this.showBack = false;
    this.showNext = true;
    this.today = new Date();
  }
  // ion-datetime change handler for the delivery date.
  deliveryDate(e) {
    this.delivery_date = e.detail.value;
  }
  /**
   * Persist the chosen delivery method for the order currently held by
   * the order facade, then return to the sales page.
   * `data === "courier_delivery"` sends the courier form values;
   * anything else records a self delivery.
   */
  updateDelivery(data) {
    let store = this.userFacade.getCurrentStore();
    if (data == "courier_delivery") {
      this.orderFacade.getOrderDelivery().subscribe(
        (res) => {
          let dat = this.courier_delivery_form.value;
          this.orderFacade.updateStoreOrder({
            data: {
              order_id: res._id,
              delivery_type: "Courier",
              delivery_date: this.delivery_date,
              ...dat,
            },
            store_id: store._id,
          });
          //reload data todo:
          this.router.navigate(["/store/sales"]);
        },
        (err) => {
          console.log(err);
        }
      );
    } else {
      this.orderFacade.getOrderDelivery().subscribe(
        (res) => {
          this.orderFacade.updateStoreOrder({data:{
            order_id: res._id,
            delivery_type: "Self",
            delivery_date: this.delivery_date,
            self: true,
          }, store_id: store._id,});
          //reload data todo:
          this.router.navigate(["/store/sales"]);
        },
        (err) => {
          console.log(err);
        }
      );
    }
  }
  // Toast confirming the self-delivery choice.
  async presentSelfToast() {
    const toast = await this.toastController.create({
      message: "Self Delivery Selected as a delivery Method",
      position: "bottom",
      animated: true,
      duration: 2000,
    });
    toast.present();
    // this.showBadgeSale=true;
    // this.showBadgeVoucher=false;
    setTimeout(() => {
      //this.notifyToSwipe()
    }, 2010);
  }
  // Toast confirming the courier choice.
  async presentCourToast() {
    const toast = await this.toastController.create({
      message: "Courier Selected as a delivery Method",
      position: "bottom",
      animated: true,
      duration: 2000,
    });
    toast.present();
    // this.showBadgeSale=true;
    //this.showBadgeVoucher=false;
    setTimeout(() => {
      //this.notifyToSwipe()
    }, 2010);
  }
  // Advance one slide after a short delay, then confirm courier via toast.
  swipeNextCour() {
    setTimeout(() => {
      this.slides.slideNext();
      this.presentCourToast();
    }, 1000);
  }
  // Segment toggle between self delivery and third-party courier.
  segmentChanged(value) {
    let data = value.detail.value;
    if (data == "Self_delivery") {
      console.log(data);
      this.self_delivery = true;
    }
    if (data == "third_party") {
      this.self_delivery = false;
      console.log(data);
    }
  }
  // Advance one slide after a short delay, then confirm self delivery.
  swipeNextSelf() {
    setTimeout(() => {
      this.slides.slideNext();
      this.presentSelfToast();
    }, 1000);
  }
  /**
   * Keep the nav buttons in sync with the active slide (0 = first,
   * 2 = last), then lock swiping so navigation only happens through the
   * next/back buttons.
   */
  slideChange(event) {
    this.slides.getActiveIndex().then((data) => {
      this.slides_number = data + 1;
      if (data == 2) {
        this.showNext = false;
        this.showBack = true;
      } else if (data == 0) {
        this.showBack = false;
        this.showNext = true;
      } else {
        this.showNext = true;
        this.showBack = true;
      }
    });
    this.slides.lockSwipes(true);
  }
  // Programmatic "next": temporarily unlock swipes, then advance.
  next() {
    this.slides.lockSwipes(false);
    this.slides.slideNext();
  }
  // Programmatic "back": temporarily unlock swipes, then go back.
  slidesBack() {
    this.slides.lockSwipes(false);
    this.slides.slidePrev();
  }
}
<file_sep>import { Component, OnInit } from "@angular/core";
import { ProductFacadeService } from "src/app/core-modules/services/products/product-facade/product-facade.service";
@Component({
  selector: "app-product-review",
  templateUrl: "./product-review.component.html",
  styleUrls: ["./product-review.component.scss"],
})
export class ProductReviewComponent implements OnInit {
  // Stream of the current user's product reviews.
  public productReviews$;

  constructor(private _productFacade: ProductFacadeService) {}

  /** Kick off the review load, then expose the resulting stream. */
  ngOnInit() {
    this._productFacade.loadUserReviews();
    const reviews$ = this._productFacade.getUserReviews();
    this.productReviews$ = reviews$;
    console.log(this.productReviews$);
  }
}
<file_sep>export function production(){
return 'http://localhost:3000'
}
<file_sep>import { Component, OnInit } from '@angular/core';
import { HeaderStateService } from "src/app/core-modules/services/header/header-state/header-state.service";
import { Router } from "@angular/router";
@Component({
  selector: 'app-store-selections',
  templateUrl: './store-selections.page.html',
  styleUrls: ['./store-selections.page.scss'],
})
export class StoreSelectionsPage implements OnInit {
  constructor(private headerState: HeaderStateService, private router: Router) { }

  ngOnInit() {
  }

  /**
   * Remember that "back" should return to the store list, then open
   * the store-registration flow.
   */
  setRoute() {
    this.headerState.setReturnRoute("/user/stores");
    this.router.navigate(['/auth/user/store-register']);
  }
}
<file_sep>import { Component, OnInit } from "@angular/core";
import { HttpClient } from "@angular/common/http";
import { Papa } from "ngx-papaparse";
//import { File } from '@ionic-native/file/ngx';
import { ProductApiService } from "src/app/core-modules/services/products/product-api/product-api.service";
import { UserFacadeService } from "src/app/core-modules/services/profile/profile-facade/profile-facade.service";
// CSV import/export of the store's product catalogue.
@Component({
  selector: "app-csv",
  templateUrl: "./csv.component.html",
  styleUrls: ["./csv.component.scss"],
})
export class CsvComponent implements OnInit {
  // Products loaded from the API, shown as a table / exported as CSV.
  jsonData: any[] = [];
  // Parsed rows of an uploaded CSV (header row stripped).
  csvData: any[] = [];
  // Column names (first CSV row, or keys of the first product).
  headerRow: any[] = [];
  // True once the user has picked a file.
  uploaded: boolean = false;
  localUrl;
  // True when products have been fetched and the table can render.
  ProductReady: boolean;
  // Export-licence flag — export is blocked while false.
  allowed = false;

  constructor(
    private http: HttpClient,
    private papa: Papa,
    private productApi: ProductApiService,
    private userFacade: UserFacadeService
  ) { }

  ngOnInit() {
  }

  /** File-input change handler: parse the chosen CSV file. */
  handleFileInput(event) {
    this.uploaded = true;
    const file = event.target.files[0];
    this.extractData(file);
  }

  /** Parse raw CSV content; the first row becomes headerRow, the rest csvData. */
  extractData(data) {
    const rawCsv = data || "";
    this.papa.parse(rawCsv, {
      complete: (parseData) => {
        this.headerRow = parseData.data.splice(0, 1)[0];
        this.csvData = parseData.data;
      },
    });
  }

  /** Upload the parsed CSV rows as products of the current store. */
  saveProducts() {
    const store = this.userFacade.getCurrentStore();
    this.productApi
      .createBulkProducts({ csv: this.csvData, store_id: store._id })
      .subscribe(
        (res) => console.log(res),
        (err) => console.log(err)
      );
  }

  /**
   * Fetch the current store's products for display/export.
   *
   * Fix: the original derived headerRow from jsonData[0] whenever
   * jsonData was truthy — but an empty array is truthy, so a store with
   * no products crashed on Object.keys(undefined). Now guarded by length.
   */
  loadProducts() {
    const store = this.userFacade.getCurrentStore();
    this.productApi.getStoreProductsById(store._id).subscribe(
      (res) => {
        this.jsonData = res.products;
        if (this.jsonData && this.jsonData.length > 0) {
          this.ProductReady = true;
          this.headerRow = Object.keys(this.jsonData[0]);
        }
      },
      (err) => console.log(err)
    );
  }

  /** Build a CSV from the loaded products and trigger a browser download. */
  exportCSV() {
    if (this.allowed) {
      const csv = this.papa.unparse({
        fields: this.headerRow,
        data: this.jsonData,
      });
      // Desktop download via a temporary anchor element.
      const blob = new Blob([csv]);
      const a = window.document.createElement("a");
      a.href = window.URL.createObjectURL(blob);
      a.download = "Cartalist_products.csv";
      document.body.appendChild(a);
      a.click();
      document.body.removeChild(a);
    } else {
      console.log("buy lisence");
    }
  }
}
<file_sep>import { Component, OnInit } from '@angular/core';
import { HttpClient, HttpErrorResponse } from '@angular/common/http';
import { Router, ActivatedRoute } from '@angular/router';
@Component({
  selector: 'app-google',
  templateUrl: './google.page.html',
  styleUrls: ['./google.page.scss'],
})
export class GooglePage implements OnInit {
  // Token id extracted from the OAuth redirect query string.
  id;

  constructor(
    private router: Router,
    private activatedRoute: ActivatedRoute
  ) { }

  /**
   * Read the `id` query parameter set by the Google OAuth redirect.
   * When present it is stored as the auth token and the user is sent to
   * the landing page; otherwise a diagnostic is logged.
   */
  ngOnInit() {
    this.activatedRoute.queryParams.subscribe(params => {
      const token = params['id'];
      this.id = token;
      if (token != null) {
        localStorage.setItem('token', token);
        this.router.navigate(['/user/landing']);
      } else {
        console.log('You are not google Authenticated');
      }
    });
  }
}
<file_sep>import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { RouterModule } from '@angular/router';
import { IonicModule } from '@ionic/angular';
import { FormsModule } from '@angular/forms';
import { ReactiveFormsModule } from '@angular/forms';
import {MatExpansionModule} from '@angular/material/expansion';
import { PromoComponent } from './promo/promo.component';
import { PaymentsComponent } from './payments/payments.component';
import { OrdersComponent } from './orders/orders.component';
import { AddpromoComponent } from './modals/addpromo/addpromo.component';
import { RunsaleComponent } from './promo/runSale/runsale.component';
import { DeliverComponent } from './orders/deliver/deliver.component';
import { ReceiptsComponent } from './payments/receipts/receipts.component';
// Bundles the sales-related components (promotions, payments, orders and
// their children) so feature pages can import them as one unit.
@NgModule({
  declarations: [
    PromoComponent,
    PaymentsComponent,
    OrdersComponent,
    AddpromoComponent,
    RunsaleComponent,
    DeliverComponent,
    ReceiptsComponent
  ],
  imports: [
    CommonModule,
    RouterModule,
    IonicModule,
    ReactiveFormsModule,
    FormsModule,
    MatExpansionModule
  ],
  // All declared components are also exported for use by consumers.
  exports: [
    PromoComponent,
    PaymentsComponent,
    OrdersComponent,
    AddpromoComponent,
    RunsaleComponent,
    DeliverComponent,
    ReceiptsComponent
  ],
  // AddpromoComponent is opened dynamically (modal), so it must be an
  // entry component.
  entryComponents: [
    AddpromoComponent
  ]
})
export class SalesComponentsModule { }
<file_sep>import { Component, OnInit, Input } from "@angular/core";
import { ActivatedRoute } from "@angular/router";
import { ProfileApiService } from "src/app/core-modules/services/shared/profile/profile-api/profile-api.service";
import { UserFacadeService } from "src/app/core-modules/services/shared/profile/profile-facade/profile-facade.service";
import { HeaderStateService } from "src/app/core-modules/services/utils/header-state/header-state.service";
// Displays a store profile provided by the user facade.
@Component({
  selector: "app-profile",
  templateUrl: "./profile.component.html",
  styleUrls: ["./profile.component.scss"],
})
export class ProfileComponent implements OnInit {
  // Optional profile/store id passed in by the parent component.
  @Input() id: string;
  // Store profile received from the user facade.
  public profile;
  constructor(
    private route: ActivatedRoute,
    private userFacade: UserFacadeService,
    private headerState: HeaderStateService,
    private _profileApi: ProfileApiService
  ) {}
  ngOnInit() {
    console.log(this.id);
    this.userFacade.getStoreProfile$().subscribe(
      (res) => {
        this.profile = res;
      },
      (err) => {
        console.log(err);
      }
    );
    // NOTE(review): this log runs immediately after subscribing — unless
    // the profile stream emits synchronously it prints undefined.
    console.log("Profile: ", this.profile);
  }
}
<file_sep>import { Injectable } from "@angular/core";
import { BehaviorSubject, Observable } from "rxjs";
@Injectable({
  providedIn: "root",
})
export class MenuStateService {
  // Which menu variant is active; starts in the plain "user" menu.
  private menuStatus$ = new BehaviorSubject<string>("user");
  // Whether a user is currently authenticated.
  private islogged$ = new BehaviorSubject<boolean>(false);

  constructor() {}

  /** Stream of the current menu status. */
  getMenuStatus() {
    return this.menuStatus$.asObservable();
  }

  /** Push a new menu status to all subscribers. */
  updateMenuStatus(status) {
    return this.menuStatus$.next(status);
  }

  /**
   * Refresh the logged-in flag from localStorage ("token" present and
   * non-empty) and return it as an observable stream.
   */
  loggedIn() {
    const hasToken = Boolean(localStorage.getItem("token"));
    this.islogged$.next(hasToken);
    return this.islogged$.asObservable();
  }
}
<file_sep>import { Component, OnInit } from '@angular/core';
import { Router } from "@angular/router";
import { HeaderStateService } from "src/app/core-modules/services/header/header-state/header-state.service";
@Component({
  selector: 'app-settings',
  templateUrl: './settings.component.html',
  styleUrls: ['./settings.component.scss'],
})
export class SettingsComponent implements OnInit {
  constructor(private headerState: HeaderStateService, private router: Router) { }

  ngOnInit() {}

  /** Record where "back" should return to, then open store registration. */
  setRoute() {
    this.headerState.setReturnRoute("fromUser");
    this.router.navigate(['/auth/user/store-register']);
  }

  /** Navigate to an arbitrary route supplied by the template. */
  gotTo(data) {
    this.router.navigate([data]);
  }
}
<file_sep>import { Component, OnInit } from "@angular/core";
import { ModalController, ToastController } from "@ionic/angular";
import { OrderFacadeService } from "src/app/core-modules/services/orders/order-facade/order-facade.service";
import { UserFacadeService } from "src/app/core-modules/services/profile/profile-facade/profile-facade.service";
import { distinctUntilChanged } from "rxjs/operators";
import { VoucherFacadeService } from "src/app/core-modules/services/vouchers/voucher-facade/voucher-facade.service";
import {BreakpointObserver, Breakpoints} from '@angular/cdk/layout';
import { HeaderStateService } from "src/app/core-modules/services/header/header-state/header-state.service";
// Store sales dashboard: hosts the orders / vouchers / promotions
// segments and keeps them in sync with the desktop side menu.
@Component({
  selector: "app-sales",
  templateUrl: "./sales.page.html",
  styleUrls: ["./sales.page.scss"],
})
export class SalesPage implements OnInit {
  // Currently visible segment (e.g. "orders", "promo").
  public segmentChanged = "orders";
  allowed;
  // Stream of the store's orders.
  orders$;
  vouchers$;
  // "Handset" | "Tablet" | "Web" — set by the breakpoint observers below.
  device_screen;
  // Order-list filter label.
  filter
  constructor(
    public modalController: ModalController,
    private userFacade: UserFacadeService,
    private orderFacade: OrderFacadeService,
    private voucherFacade: VoucherFacadeService,
    public toastController: ToastController,
    private headerStateService: HeaderStateService,
    breakpointObserver: BreakpointObserver,
  ) {
    // Track device class. Each observer fires independently; if several
    // breakpoints match, the last emission wins.
    breakpointObserver.observe([
      Breakpoints.Handset
    ]).subscribe(result => {
      if (result.matches) {
        this.device_screen = "Handset"
      }
    });
    breakpointObserver.observe([
      Breakpoints.Tablet
    ]).subscribe(result => {
      if (result.matches) {
        this.device_screen = "Tablet"
      }
    });
    breakpointObserver.observe([
      Breakpoints.Web
    ]).subscribe(result => {
      if (result.matches) {
        this.device_screen = "Web"
      }
    });
    // Mirror the desktop side-menu selection into the local segment; the
    // side menu calls it "promotions" while the segment is named "promo".
    this.headerStateService.getDesktopSideMenuState().pipe(distinctUntilChanged()).subscribe(
      (res)=>{
        if(res == 'promotions'){
          this.segmentChanged ='promo'
        }else{
          this.segmentChanged = res
        }
        console.log(res)
      }
    )
  }
  ngOnInit() {
    this.filter ="Default"
    //get orders then pass data
    this.getOrders();
    this.loadVouchers();
  }
  /**
   * Load the store's orders and expose them as `orders$`. Falls back to
   * the user's default store when no current store is selected yet.
   */
  getOrders() {
    //get current store
    let store = this.userFacade.getCurrentStore();
    //load store profile and subscribe store products
    if (Object.keys(store).length === 0) {
      this.userFacade.getUser$().subscribe(
        //load default store
        (res) => {
          this.userFacade.loadStoreProfile(res.store_id);
          this.orderFacade.loadStoreOrdersById(res.store_id);
          this.orders$ = this.orderFacade
            .getStoreOrders$()
            .pipe(distinctUntilChanged());
        },
        (err) => {
          console.log(err);
        }
      );
    } else {
      this.userFacade.loadStoreProfile(store._id);
      this.orderFacade.loadStoreOrdersById(store._id);
      this.orders$ = this.orderFacade
        .getStoreOrders$()
        .pipe(distinctUntilChanged());
    }
  }
  /** Same default-store fallback as getOrders, but for vouchers. */
  loadVouchers() {
    let store = this.userFacade.getCurrentStore();
    //load store profile and subscribe store products
    if (Object.keys(store).length === 0) {
      this.userFacade.getUser$().subscribe(
        (res) => {
          this.userFacade.loadStoreProfile(res.store_id);
          this.voucherFacade.loadStoreVouchersById({ id: res.store_id });
        },
        (err) => {
          console.log(err);
        }
      );
    } else {
      this.userFacade.loadStoreProfile(store._id);
      this.voucherFacade.loadStoreVouchersById({ id: store._id });
    }
  }
  // Child components notify segment switches through this handler.
  getNotification(evt) {
    this.segmentChanged = evt;
  }
  //
  // Direct segment switch (used by template buttons).
  changeSegment(data) {
    this.segmentChanged = data;
  }
  /** Search-bar handler: reload all orders, then narrow by the query. */
  searchItems(ev) {
    // Reset items back to all of the items
    this.orderFacade.loadStoreOrders();
    // set val to the value of the searchbar
    let val = ev.target.value;
    // if the value is an empty string don't filter the items
    if (val && val.trim() != "") {
      this.orderFacade.loadSearchedStoreOrders(val);
    }
  }
  /** Show a short toast with the given message. */
  async presentToast(msg) {
    const toast = await this.toastController.create({
      message: msg,
      duration: 2500,
    });
    toast.present();
  }
  /** Push the local segment change back to the desktop side menu. */
  salesSegmentChanged(event){
    let segment = event.detail.value
    if(segment == 'promo'){
      this.headerStateService.setDesktopSideMenuState('promotions')
    }else{
      this.headerStateService.setDesktopSideMenuState(segment)
    }
    console.log(segment)
  }
}
<file_sep>import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { MainListComponent } from "./main-list/main-list.component";
import {GemionicUiModule} from "src/assets/gm-components/gemionic.ui.module"
// Wraps the all-products list component for reuse by feature modules.
@NgModule({
  declarations: [
    MainListComponent
  ],
  imports: [
    CommonModule,
    GemionicUiModule
  ],
  exports:[
    MainListComponent
  ]
})
export class AllProductsModule { }
<file_sep>import { Injectable } from "@angular/core";
import { HttpClient } from "@angular/common/http";
import { EnvService } from "src/app/env.service";
// Thin HTTP wrapper around the email-sending endpoint.
@Injectable({
  providedIn: "root",
})
export class EmailsApiService {
  // POST target for outgoing emails, rooted at the configured API base.
  _urlSendEmail = `${this.env.apiUrl}/api/emails/send_email`;
  constructor(private _http: HttpClient, private env: EnvService) {}
  /**
   * Send an email. `data` is forwarded verbatim to the backend
   * (recipient, subject, message, attachments — see the server route).
   */
  sendEmail(data) {
    return this._http.post<any>(this._urlSendEmail, data);
  }
}
<file_sep>const express = require("express");
const router = express.Router();
const authCheck = require("../../../validation/authenticate/checkMiddleware/jwtCheck")
const sendEmail = require("../../../utils/emails/send_email")
// POST /send_email — relay an email on behalf of the authenticated user.
// Expects: { email, subject, message, html_message, attachments? }.
router.post("/send_email", authCheck, (req, res) => {
  const user = req.authData;
  const body = req.body;

  // Normalize attachments: accept a missing/null/non-array value.
  // Fix: the original read `body.attachments.length` BEFORE its
  // null/undefined checks, throwing a TypeError whenever the field was
  // absent from the request body.
  const attachments = Array.isArray(body.attachments) ? body.attachments : [];

  // Fix: `data` was an implicit global followed by a stray trailing
  // comma (comma-operator expression); it is now a proper local const.
  const data = {
    admin_email: user.email, // sender: the authenticated user
    user_email: body.email, // recipient supplied by the client
    subject: body.subject,
    message: body.message,
    html_message: body.html_message,
    attachments: attachments,
  };

  // Delegate the actual delivery (and the HTTP response) to the helper.
  sendEmail(data, req, res);
});

module.exports = router;
<file_sep>import { Component, OnInit, ViewChild } from '@angular/core';
import { IonSlides } from "@ionic/angular";
import { CartService } from "src/app/core-modules/services/cart/cart-state/cart.service";
// Guest checkout page: lets an unauthenticated visitor review and edit
// the cart contents inside a slider flow.
@Component({
  selector: 'app-guest',
  templateUrl: './guest.page.html',
  styleUrls: ['./guest.page.scss'],
})
export class GuestPage implements OnInit {
  // Cart contents (array of cart items).
  public cart;
  // Item currently being edited, set by edits().
  public edit;
  @ViewChild("guest_slides", { static: true }) slides: IonSlides;

  constructor(
    private cartService: CartService,
  ) { }

  ngOnInit() {
    this.cart = this.cartService.getCart();
    console.log(this.cart);
  }

  /** Advance the slider to the next slide. */
  next() {
    this.slides.slideNext();
    console.log("okay");
  }

  /**
   * Find the cart entry backing a combo's secondary product, or
   * undefined when it is not in the cart. (Extracted: this lookup was
   * duplicated verbatim in decrease/increase/removeCartItem.)
   */
  private findSecondaryEntry(product) {
    return this.cart.find((item) => item._id == product.secondary_product._id);
  }

  /** Decrease quantity; for combos the secondary product follows suit. */
  decreaseCartItem(product) {
    if (product.secondary_product) {
      this.cartService.decreaseProduct(this.findSecondaryEntry(product));
    }
    this.cartService.decreaseProduct(product);
  }

  /** Increase quantity; for combos the secondary product follows suit. */
  increaseCartItem(product) {
    if (product.secondary_product) {
      this.cartService.increaseProduct(this.findSecondaryEntry(product));
    }
    this.cartService.increaseProduct(product);
  }

  /** Remove the item; for combos the secondary product is removed too. */
  removeCartItem(product) {
    if (product.secondary_product) {
      this.cartService.removeProduct(this.findSecondaryEntry(product));
    }
    this.cartService.removeProduct(product);
  }

  /** Mark a product as being edited. */
  edits(product) {
    this.edit = product;
  }
}
<file_sep>import { Injectable } from "@angular/core";
import { BehaviorSubject, Observable } from "rxjs";
@Injectable({
  providedIn: "root",
})
export class HeaderStateService {
  // Current header variant; "default" until something overrides it.
  private headerStatus$ = new BehaviorSubject<string>("default");
  // Header text/state for policy pages.
  public policy_header;
  // Route the back button should return to.
  public return_route;
  // Arbitrary payload handed between pages via the header.
  public data_passed;
  // Whether the product-detail header is active.
  private detail_header = new BehaviorSubject<boolean>(false);
  // Whether the vendor-page header is active.
  private vendor_header = new BehaviorSubject<boolean>(false);
  // Desktop side-menu selection shared across pages.
  public desktop_data_pass = new BehaviorSubject<string>('')
  // Order currently highlighted on the dashboard.
  public dash_active_order_selected = new BehaviorSubject<any>('')

  constructor() {}

  /** Stream of the current header status. */
  getHeaderStatus() {
    return this.headerStatus$.asObservable();
  }

  /** Set the header status for all subscribers. */
  updateHeaderStatus(status) {
    return this.headerStatus$.next(status);
  }

  /** Restore the header to its default state. */
  resetHeaderStatus() {
    return this.headerStatus$.next("default");
  }

  /** Remember where the back button should navigate. */
  setReturnRoute(route) {
    this.return_route = route;
  }

  /** Stash a payload for the next page to pick up. */
  setDataPassed(data) {
    this.data_passed = data;
  }

  /** Toggle the product-detail header. */
  changeDetailHeader(value) {
    this.detail_header.next(value);
  }

  /** Toggle the vendor-page header. */
  changeVendorPageHeaderState(value) {
    this.vendor_header.next(value);
  }

  /** Stream of the product-detail header flag. */
  getDetailHeader() {
    return this.detail_header.asObservable();
  }

  /** Stream of the vendor-page header flag. */
  getVendorPageHeaderState() {
    return this.vendor_header.asObservable();
  }

  /** Stream of the desktop side-menu selection. */
  getDesktopSideMenuState() {
    return this.desktop_data_pass.asObservable();
  }

  /** Stream of the dashboard's selected order. */
  getDashActiveOrderSelect() {
    return this.dash_active_order_selected.asObservable();
  }

  /** Select an order on the dashboard. */
  setDashActiveOrderSelect(data) {
    this.dash_active_order_selected.next(data);
  }

  /** Update the desktop side-menu selection. */
  setDesktopSideMenuState(data) {
    this.desktop_data_pass.next(data);
  }
}
<file_sep>import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { RouterModule } from '@angular/router';
import { IonicModule } from '@ionic/angular';
import { AdminComponent } from './side/admin/admin.component';
import { UserComponent } from './side/user/user.component';
import {MatTreeModule} from '@angular/material/tree';
// Bundles the admin/user side-menu components for reuse.
@NgModule({
  declarations: [
    AdminComponent,
    UserComponent
  ],
  imports: [
    CommonModule,
    RouterModule,
    IonicModule,
    MatTreeModule
  ],
  exports: [
    AdminComponent,
    UserComponent
  ]
})
export class MenusModule { }
<file_sep>import { Component, OnInit } from '@angular/core';
import { ModalController } from '@ionic/angular';
import { FormGroup, FormArray, FormBuilder, Validators } from '@angular/forms';
import { ProductApiService } from 'src/app/core-modules/services/products/product-api/product-api.service'
import { VoucherApiService } from 'src/app/core-modules/services/vouchers/voucher-api/voucher-api.service'
// Modal for creating a promotion/voucher.
@Component({
  selector: 'app-addpromo',
  templateUrl: './addpromo.component.html',
  styleUrls: ['./addpromo.component.scss'],
})
export class AddpromoComponent implements OnInit {
  // Form describing the promotion being created.
  public addPromo: FormGroup;
  // FormArray of products covered by the promotion.
  public productList: FormArray;
  // Store products offered for selection.
  public products: any;
  // returns all form groups under properties
  get productFormGroup() {
    return this.addPromo.get('products') as FormArray;
  }
  constructor(
    public modalController: ModalController,
    private formBuilder: FormBuilder,
    private productService: ProductApiService,
    private voucherService: VoucherApiService
  ) { }
  ngOnInit() {
    // Load the store's products for the picker.
    this.productService.getStoreProducts().subscribe(
      res => {
        this.products = res.product
      },
      err => {
        console.log(err)
      }
    )
    // NOTE(review): the form is seeded with hard-coded sample values
    // (a literal product id, quotas, an exp_date in the past) — looks
    // like leftover test data; confirm intended defaults.
    this.addPromo = this.formBuilder.group({
      run_sale: [false ,Validators.compose([Validators.required])],
      type: ['Item_Discount' ,Validators.compose([Validators.required])],
      title: ['DecemberSpecial' ,Validators.compose([Validators.required])],
      //products: this.formBuilder.array([this.createProduct()]),
      products: [[
        {
          _id:'5e9c6fe56a3e45389012e7af',
          quota: 1
        }
      ] ,Validators.compose([Validators.required])],
      total_quota: [25 ,Validators.compose([Validators.required])],
      platform: ['Youtube' ,Validators.compose([Validators.required])],
      discount: [0.1 ,Validators.compose([Validators.required])],
      items_exceeding: [0 ,Validators.compose([Validators.required])],
      exp_date: ["2020-05-19T15:36:05.635Z" ,Validators.compose([Validators.required])],
    })
    console.log(this.addPromo.value)
    // NOTE(review): a voucher is created immediately on init with the
    // seeded form values — merely opening this modal has a server-side
    // effect; TODO confirm this should run on an explicit save instead.
    this.voucherService.createVoucher(this.addPromo.value).subscribe(
      res => {
        console.log(res)
      },
      err => {
        console.log(err)
      }
    )
    // set productList to the form control containing propeties
    this.productList = this.addPromo.get('products') as FormArray;
  }
  // Generate new product
  createProduct(): FormGroup {
    return this.formBuilder.group({
      productname: [null, Validators.compose([Validators.required])],
    });
  }
  /** Append an empty product row to the form array. */
  addProduct() {
    this.productList.push(this.createProduct());
  }
  /** Remove the product row at `index`. */
  removeProduct(index) {
    this.productList.removeAt(index);
  }
  /** Typed accessor for the product row at `index`. */
  getProductFormGroup(index): FormGroup {
    const formGroup = this.productList.controls[index] as FormGroup;
    return formGroup;
  }
  /** Close the modal without saving. */
  dismiss(){
    this.modalController.dismiss({
      'dismissed': true
    })
  }
  /*
  products
  */
}
<file_sep>import { TestBed } from "@angular/core/testing";
import { ImageFacadeService } from "./image-facade.service";
// Unit test suite for ImageFacadeService.
describe("ImageFacadeService", () => {
  let service: ImageFacadeService;
  // Fresh TestBed + service instance before every spec.
  beforeEach(() => {
    TestBed.configureTestingModule({});
    service = TestBed.inject(ImageFacadeService);
  });
  // Smoke test: the service can be constructed by the injector.
  it("should be created", () => {
    expect(service).toBeTruthy();
  });
});
<file_sep>import { Component, OnInit } from "@angular/core";
import { ToastController } from '@ionic/angular';
import { AuthApiService } from 'src/app/core-modules/services/auth/auth-api/auth-api.service';
import { UserFacadeService } from "src/app/core-modules/services/profile/profile-facade/profile-facade.service";
import {BreakpointObserver, Breakpoints} from '@angular/cdk/layout';
// Store settings page: shows the store profile and lets the owner
// activate a 15-day free trial.
@Component({
  selector: "app-settings",
  templateUrl: "./settings.page.html",
  styleUrls: ["./settings.page.scss"],
})
export class SettingsPage implements OnInit {
  // Observable of the store profile (assigned in ngOnInit).
  profile;
  // "Handset" | "Tablet" | "Web" — set by the breakpoint observers below.
  device_screen;
  // Latest store profile snapshot.
  store;
  // Store logo taken from the profile.
  logo
  constructor(
    private userFacade: UserFacadeService,
    private authApi: AuthApiService,
    public toastController: ToastController,
    breakpointObserver: BreakpointObserver,
  ) {
    // Track device class. Each observer fires independently; if several
    // breakpoints match, the last emission wins.
    breakpointObserver.observe([
      Breakpoints.Handset
    ]).subscribe(result => {
      if (result.matches) {
        this.device_screen = "Handset"
      }
    });
    breakpointObserver.observe([
      Breakpoints.Tablet
    ]).subscribe(result => {
      if (result.matches) {
        this.device_screen = "Tablet"
      }
    });
    breakpointObserver.observe([
      Breakpoints.Web
    ]).subscribe(result => {
      if (result.matches) {
        this.device_screen = "Web"
      }
    });
  }
  ngOnInit() {
    // Load the user's store profile, then mirror it into local fields.
    this.userFacade.getUser$().subscribe(
      //load store profile
      (res) => {
        this.userFacade.loadStoreProfile(res.store_id);
        this.profile = this.userFacade.getStoreProfile$();
        this.profile.subscribe(
          (res) => {
            this.store = res
            this.logo = this.store.logo
            console.log(res);
          },
          (err) => {
            console.log(err);
          }
        );
      },
      (err) => {
        console.log(err);
      }
    );
  }
  /**
   * Start the 15-day free trial (5 allowed products) for the current
   * store. Refuses with a toast when a trial was already activated.
   */
  activateFreeTrial(){
    let store = this.userFacade.getCurrentStore();
    let today_ms = Date.now()
    let one_day_ms = 86400000 // 24h in milliseconds
    let half_month_ms = one_day_ms * 15
    let trial_end_period = today_ms + half_month_ms
    let data = {
      store_id: store._id,
      allowed_products: 5,
      end_date: trial_end_period
    }
    if(store.free_trial){
      this.presentToast(
        "Trial already Activated"
      );
    }else{
      this.authApi.activateFreeTrial({data:data}).subscribe(
        res => {
          this.presentToast(
            "15 Day Trial Activated"
          );
        },
        err => {
          console.log(err)
        }
      )
    }
  }
  /** Show a short toast with the given message. */
  async presentToast(msg) {
    const toast = await this.toastController.create({
      message: msg,
      duration: 2500,
    });
    toast.present();
  }
}
<file_sep>import { TestBed } from "@angular/core/testing";
import { ProductsStateService } from "./products-state.service";
// Unit test suite for ProductsStateService.
describe("ProductsStateService", () => {
  let service: ProductsStateService;
  // Fresh TestBed + service instance before every spec.
  beforeEach(() => {
    TestBed.configureTestingModule({});
    service = TestBed.inject(ProductsStateService);
  });
  // Smoke test: the service can be constructed by the injector.
  it("should be created", () => {
    expect(service).toBeTruthy();
  });
});
<file_sep>const mongoose = require("mongoose");
// User Schema — Mongoose schema for application users (shoppers and
// store owners).
const Schema = mongoose.Schema;
const UserSchema = new Schema({
  // Login identifier; uniqueness presumably enforced elsewhere — TODO confirm.
  email: {
    type: String,
    required: true,
  },
  // Password — presumably stored hashed by the auth routes; confirm there.
  password: {
    type: String,
    required: true,
  },
  first_name: {
    type: String,
  },
  last_name: {
    type: String,
  },
  // Free-form address object.
  address: Schema.Types.Mixed,
  // Profile image descriptor(s).
  profileImage: {
    type: Array,
  },
  // Account creation timestamp.
  date: {
    type: Date,
    default: Date.now,
  },
  // Email-verification state plus its one-time token.
  verified: {
    isVerified: {
      type: Boolean,
      default: false,
    },
    token: {
      type: String,
      required: true,
    },
  },
  // Stores owned by this user.
  my_stores: {
    type: Array,
  },
  // True once the user has registered a store.
  storeOwner: {
    type: Boolean,
    default: false,
  },
  // Currently active / default store.
  store_id: {
    type: String,
  },
  // Whether the user accepted the terms of service.
  user_agreement: {
    type: Boolean,
  },
});
// User model
const User = mongoose.model("users", UserSchema);
module.exports = User;
<file_sep>import { NgModule } from '@angular/core';
import { PreloadAllModules, RouterModule, Routes } from '@angular/router';
import { PaymentPage } from './payment.page';
// Child routes of the payment flow: checkout plus the PayFast
// success/cancel return pages. Each page module is lazy-loaded.
const routes: Routes = [
  {
    path: 'payment',
    component: PaymentPage,
    children: [
      {
        path: 'checkout',
        children:[
          {
            path: '',
            loadChildren: () => import('../pages/checkout/checkout.module').then( m => m.CheckoutPageModule),
          },
          {
            // PayFast redirects back here after a payment attempt.
            path: 'payfast',
            children:[
              { path: 'success', loadChildren: () => import('../pages/payfast/success/success.module').then( m => m.SuccessPageModule)},
              { path: 'cancel', loadChildren: () => import('../pages/payfast/cancel/cancel.module').then( m => m.CancelPageModule)}
            ]
          },
          {
            path: '',
            redirectTo: '/buy/payment/checkout',
            pathMatch: 'full'
          }
        ]
      },
      {
        // Default child: go straight to checkout.
        path: '',
        redirectTo: '/buy/payment/checkout',
        pathMatch: 'full'
      }
    ]
  },
  {
    // Top-level default: also checkout.
    path: '',
    redirectTo: '/buy/payment/checkout',
    pathMatch: 'full'
  }
]
@NgModule({
  imports: [
    RouterModule.forChild(routes)
  ],
  exports: [RouterModule]
})
export class PaymentRoutingModule {}
<file_sep>const express = require("express");
const async = require("async");
const fs = require("fs");
const jwt = require("jsonwebtoken");
const keys = require("../../../config/users/keys");
const authCheck = require("../../../validation/authenticate/checkMiddleware/jwtCheck");
const router = express.Router();
// Load Product model
const Activity = require("../../../models/store/Admin_activity");
// POST /pushActivity — record an admin activity entry for the caller's store.
router.post("/pushActivity", authCheck, (req, res) => {
  const user = req.authData;
  const activity = req.body.activity;
  const new_activity = new Activity({
    store_id: user.store_id,
    type: activity.type,
    title: activity.title,
    msg: activity.msg,
    user_ini: activity.user_ini,
    user_name: activity.user_name,
  });
  new_activity
    .save()
    .then((activity) => {
      res.json({
        activity: activity,
      });
    })
    // Fix: a failed save previously produced an unhandled rejection and
    // left the request hanging; report a 500 instead.
    .catch((err) => res.status(500).json({ error: err.message }));
});

// POST /getActivities — list all activity entries for the caller's store.
router.post("/getActivities", authCheck, (req, res) => {
  const user = req.authData;
  Activity.find({ store_id: user.store_id })
    .then((activities) => {
      res.json({
        activities: activities,
      });
    })
    .catch((err) => res.status(500).json({ error: err.message }));
});

module.exports = router;
<file_sep>import { Injectable } from "@angular/core";
import { HttpClient } from "@angular/common/http";
import { EnvService } from "src/app/env.service";
// Thin HTTP wrapper around the product endpoints.
@Injectable({
  providedIn: "root",
})
export class ProductsApiService {
  // Endpoint URLs, rooted at the configured API base.
  _urlViewStoreProductsById = `${this.env.apiUrl}/api/product/view_vendor_products`;
  _urlUpdateProduct = `${this.env.apiUrl}/api/product/updateProduct`;
  _urlDeleteProduct = `${this.env.apiUrl}/api/product/deleteProduct`;
  _urlUpdateProductImage = `${this.env.apiUrl}/api/product/update_product_image`;
  _urlCreateProduct = `${this.env.apiUrl}/api/product/createProduct`;
  _urlcreateBulkProduct = `${this.env.apiUrl}/api/product/create_bulk_products`;
  constructor(private _http: HttpClient, private env: EnvService) {}
  /** Fetch all products of the store identified by `id`. */
  getStoreProductsById(id) {
    return this._http.post<any>(this._urlViewStoreProductsById, id);
  }
  /** Update an existing product. */
  updateProduct(product) {
    return this._http.post<any>(this._urlUpdateProduct, product);
  }
  /** Delete the product identified by `id`. */
  deleteProduct(id) {
    return this._http.post<any>(this._urlDeleteProduct, id);
  }
  /** Replace a product's image. */
  updateProductImage(data) {
    return this._http.post<any>(this._urlUpdateProductImage, data);
  }
  /** Create a single product. */
  createProduct(data) {
    return this._http.post<any>(this._urlCreateProduct, data);
  }
  /** Create many products at once (CSV import). */
  createBulkProducts(data) {
    return this._http.post<any>(this._urlcreateBulkProduct, data);
  }
}
<file_sep>const jwt = require("jsonwebtoken");
const keys = require("../../../config/users/keys");
module.exports = function createToken(user, res) {
// User matched
// Create JWT Payload
const payload = {
id: user.id,
email: user.email,
store_id: user.store_id
};
// Sign token
jwt.sign(payload, keys.secretOrKey,{
expiresIn: 31556926 // 1 year
},
(err, token) => {
res.json({
success: true,
token: token,
user: user
});
}
);
}
<file_sep>import { Injectable } from "@angular/core";
import { HttpClient } from "@angular/common/http";
import { EnvService } from "src/app/env.service";
// Thin HTTP wrapper around the product image endpoints.
@Injectable({
  providedIn: "root",
})
export class ImageApiService {
  // Endpoint URLs, rooted at the configured API base.
  _urlUploadPicture = `${this.env.apiUrl}/api/product/uploadImage`;
  _urlDeleteMongoImage = `${this.env.apiUrl}/api/product/delete_mongo_image`;
  _urlDeleteCloudImage = `${this.env.apiUrl}/api/product/delete_cloudinary_image`;
  constructor(private _http: HttpClient, private env: EnvService) {}
  /** Upload a product image. */
  uploadImage(data) {
    return this._http.post<any>(this._urlUploadPicture, data);
  }
  /** Delete the image from the cloud store (Cloudinary). */
  deleteCloudImage(data) {
    return this._http.post<any>(this._urlDeleteCloudImage, data);
  }
  /** Delete the image reference stored in MongoDB. */
  deleteMongoImage(data) {
    return this._http.post<any>(this._urlDeleteMongoImage, data);
  }
}
<file_sep>import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { ProfileComponent} from './profile/profile.component'
import { StoreProductsComponent} from './store-products/store-products.component'
import { FormsModule } from '@angular/forms';
import {GemionicUiModule} from "src/assets/gm-components/gemionic.ui.module"
// Bundles the store-profile components (profile header + product list).
@NgModule({
  declarations: [
    ProfileComponent,
    StoreProductsComponent
  ],
  imports: [
    CommonModule,
    FormsModule,
    GemionicUiModule
  ],
  exports: [
    ProfileComponent,
    StoreProductsComponent
  ]
})
export class StoreProfileComponentsModule { }
<file_sep>const express = require("express");
const async = require("async");
const fs = require("fs");
const jwt = require("jsonwebtoken");
const keys = require("../../../config/users/keys");
const authCheck = require("../../../validation/authenticate/checkMiddleware/jwtCheck");
const router = express.Router();
// Load Product model
const Notification = require("../../../models/users/Notifications");
// POST /pushNotification — intended to persist a pushed notification.
// NOTE(review): this handler reads the payload but never saves anything
// and never sends a response, so requests hang until timeout — looks
// unfinished; TODO implement save + res.json.
router.post("/pushNotification", authCheck, (req, res) => {
  const user = req.authData;
  const notification = req.body.notification;
});
// GET /getNotifications — the caller's notifications, newest first.
router.get("/getNotifications", authCheck, (req, res) => {
  const user = req.authData;
  // _id descending ≈ reverse chronological (ObjectIds embed a timestamp).
  Notification.find({ user_email: user.email }).sort({_id: -1}).then((notifications) => {
    res.json({
      notifications: notifications,
    });
  });
});
module.exports = router;
<file_sep>import { Component, OnInit } from "@angular/core";
import { ProductStateService } from "src/app/core-modules/services/products/product-state/product-state.service";
import { Router } from "@angular/router";
import { HeaderStateService } from "src/app/core-modules/services/header/header-state/header-state.service";
import { CartFacadeService } from "src/app/core-modules/services/cart/cart-facade/cart-facade.service";
import { ProductApiService } from "src/app/core-modules/services/products/product-api/product-api.service";
@Component({
  selector: "app-ad-space",
  templateUrl: "./ad-space.component.html",
  styleUrls: ["./ad-space.component.scss"],
})
export class AdSpaceComponent implements OnInit {
  // Recommended products returned by the API (a plain array despite the `$` suffix).
  products$;
  // Empty-state flag for the template: true when no recommended products came back.
  public data;

  constructor(
    private productState: ProductStateService,
    private router: Router,
    private cartFacade: CartFacadeService,
    private headerState: HeaderStateService,
    private productApi: ProductApiService
  ) {}

  ngOnInit() {
    // Fetch the "recommended products" segment to populate the ad space.
    this.productApi
      .getSegmentProducts({
        value: null,
        signal: "recommended_products",
      })
      .subscribe(
        (res) => {
          this.products$ = res.products;
          // BUG FIX: the original tested `res.length`, which is undefined on
          // the response object (`undefined == 0` is false), so `data` was
          // always false and the empty state never showed. Test the actual
          // product list instead.
          this.data = !res.products || res.products.length === 0;
        },
        (err) => {
          console.log(err);
        }
      );
  }

  // Push the recommended list into shared product state and open the
  // all-products page with it preloaded.
  loadRecommendedProducts() {
    localStorage.setItem("all_p", "Recommended products");
    this.productState.setMainProducts(this.products$);
    this.productState.setSearchingProducts(this.products$);
    this.router.navigate(["/landing/all-products"]);
  }

  // Switch the header into detail mode, then navigate to the product page.
  viewDetailPage(product) {
    this.headerState.updateHeaderStatus("product_detail");
    //navigate
    this.router.navigate([`/user/landing/product-detail`, product._id]);
  }

  addToCart(product) {
    this.cartFacade.addToCart(product);
  }
}
<file_sep>import { Component, OnInit, OnDestroy } from "@angular/core";
import { ProductFacadeService } from "src/app/core-modules/services/products/product-facade/product-facade.service";
import { CartFacadeService } from "src/app/core-modules/services/cart/cart-facade/cart-facade.service";
import { CategoryFacadeService } from "src/app/core-modules/services/categories/category-facade/category-facade.service";
import { Router } from "@angular/router";
import { HeaderStateService } from "src/app/core-modules/services/header/header-state/header-state.service";
import { distinctUntilChanged } from "rxjs/operators";
@Component({
  selector: "app-main-list",
  templateUrl: "./main-list.component.html",
  styleUrls: ["./main-list.component.scss"],
})
export class MainListComponent implements OnInit, OnDestroy {
  public products$;
  // Rotating counter used by count() to cycle the product card CSS class.
  public reset;
  categories$;
  all_p;
  // Sub-category names ("Categories" field) for the current group.
  public selected_categgory = [];
  // Product-type names for the selected sub-category.
  public product_type_categories = [];
  public current_group;
  public categories;
  products;
  // BUG FIX: categories$ subscriptions were created in ngOnInit and on every
  // loadSubCategoryProducts call but never torn down — a leak that grew with
  // each click. They are tracked here and unsubscribed in ngOnDestroy.
  private _subscriptions = [];

  constructor(
    private _productFacade: ProductFacadeService,
    private cartFacade: CartFacadeService,
    private categoryFacade: CategoryFacadeService,
    private headerState: HeaderStateService,
    private router: Router
  ) {}

  ngOnInit() {
    //getCategories
    this.categories$ = this.categoryFacade.getCategories$();
    //get products as observable
    this.all_p = localStorage.getItem("all_p");
    // Populate the sub-category chips for the current group.
    this._collectGroupedKeys("Categories", (keys) => {
      this.selected_categgory = keys;
    });
    this.products$ = this._productFacade.getMainProducts().pipe(distinctUntilChanged());
    this.reset = 0;
  }

  // Subscribe to categories$, locate the group matching localStorage's
  // "current_group", group its list by `groupKey`, and hand the non-empty
  // grouped key names to `assign`. The duplicated grouping code that used to
  // live in ngOnInit and loadSubCategoryProducts now lives only here.
  private _collectGroupedKeys(groupKey, assign) {
    const subscription = this.categories$.subscribe((res) => {
      res.forEach((group) => {
        if (group.name == localStorage.getItem("current_group")) {
          this.current_group = group.name;
          // Group the category list entries by the requested field.
          const grouped = group.list.reduce((acc, item) => {
            (acc[item[groupKey]] = acc[item[groupKey]] || []).push(item);
            return acc;
          }, {});
          // Drop the empty-string bucket before exposing the key names.
          assign(Object.keys(grouped).filter((name) => name !== ""));
        }
      });
    });
    this._subscriptions.push(subscription);
  }

  // Cycle 1..4 and return the matching product-card CSS class.
  count(i) {
    this.reset = (this.reset % 4) + 1;
    switch (this.reset) {
      case 1:
        return "product2";
      case 2:
        return "productTwo";
      case 3:
        return "productTwo2";
      case 4:
        return "product";
    }
  }

  loadCategoryProducts(category) {
    this.product_type_categories = [];
    // NOTE(review): the `category` argument is ignored and an empty search is
    // issued — presumably this resets the market list; confirm with callers.
    this._productFacade.onMarketSearch("");
  }

  loadSubCategoryProducts(sub_category) {
    this._productFacade.onMarketSearch(sub_category);
    // Refresh the product-type chips for the chosen sub-category.
    this._collectGroupedKeys("Product type", (keys) => {
      this.product_type_categories = keys;
    });
  }

  loadProductTypeCategory(selected) {
    this._productFacade.onMarketSearch(selected);
  }

  searchItems(ev) {
    // set val to the value of the searchbar
    let val = ev.target.value;
    this._productFacade.onMarketSearch(val);
  }

  viewDetailPage(product) {
    this.headerState.updateHeaderStatus("product_detail");
    //navigate
    this.router.navigate([`/user/landing/product-detail`, product._id]);
  }

  addToCart(product) {
    this.cartFacade.addToCart(product);
  }

  loadCurrentCategories(category) {
    localStorage.setItem("current_group", category);
  }

  ngOnDestroy() {
    // Release every categories$ subscription taken during this component's life.
    this._subscriptions.forEach((s) => s.unsubscribe());
    this._subscriptions = [];
    localStorage.removeItem("all_p");
    localStorage.removeItem("current_group");
    this._productFacade.resetMainList();
  }
}
<file_sep><ion-header *ngIf="!header_state">
<ion-toolbar>
<ion-buttons slot="start">
<ion-back-button></ion-back-button>
</ion-buttons>
</ion-toolbar>
</ion-header>
<ion-content style="text-align: justify;">
<ion-img src="./assets/afrobIlustration.svg">
</ion-img>
<ion-label *ngIf="!privacy_value && !user" style=" display: block;
padding: 16px;">Accept the <b>User Agreement</b> and the <b> Data Privacy Policy</b></ion-label>
<ion-card>
<mat-accordion>
<mat-expansion-panel (opened)="panelOpenState = true"
(closed)="panelOpenState = false">
<mat-expansion-panel-header>
<mat-panel-title>
About
</mat-panel-title>
</mat-expansion-panel-header>
<h2>
Vision
</h2>
<ion-text>
We believe it is the entrepreneur that will save Africa and we are fully persuaded to serve their cause with astute tools and next-level practices.
</ion-text>
<h2>
Our Becoming
</h2>
<ion-text>
The inception of AfroB Pty Limited has always been driven by the passionate desire for homegrown application code to serve the unique online needs of many local startup enterprises. We also appreciate that without consumers our ecosystem is incomplete, but more than just curating for consumers, we take pleasure in creating a tailored shopping experience for ourselves and those who appreciate a lifestyle shopping convenience that encourages online shopping in one beautiful experience. Our team comprises of talented business process analysts, backend and front-end developers. We are inspired by the challenges faced by Africans seeking an end-to-end solution that serves as a catalyst to leverage client relationships and improve online revenue.
</ion-text>
<h2>
Mission
</h2>
<ion-text>
We support our vision by cultivating a direct to consumer commercial ecosystem through frequent and efficient online conversations.
</ion-text>
<h2>
Long term objectives
</h2>
<ion-text>
We are an online B2C and B2B service provider that aims to catapult start-up businesses the best way we can without frustrating our customers' ambition in trying to persuade them of our services. Our psychographic market is unique and limited to the core beliefs and views we share to achieve our vision for Africa. Though we press on towards our business goals we would not realise our vision until our innovations spread throughout the African continent.
</ion-text>
<h2>
We aim to achieve the following;
</h2>
<ion-list>
<ion-item>
<ion-label>- Business process services that increase business reach and revenue, respectively in all 52 countries in Africa. (1-5 years)</ion-label>
</ion-item>
<ion-item>
<ion-label>- Establish Fulfillment centres in niche communities for specific niche demands (3-10 years)</ion-label>
</ion-item>
<ion-item>
<ion-label>- Fund promising online business and grow them within our network as a catalyst for ROI. (5-10 years)</ion-label>
</ion-item>
</ion-list>
<h2>
Our Core Values
</h2>
<ion-list>
<ion-item>
<ion-label>
Transparency
</ion-label>
</ion-item>
<ion-item>
<ion-label>
Empathy
</ion-label>
</ion-item>
<ion-item>
<ion-label>
Innovation
</ion-label>
</ion-item>
<ion-item>
<ion-label>
Fun
</ion-label>
</ion-item>
<ion-item>
<ion-label>
Symbiotic growth
</ion-label>
</ion-item>
</ion-list>
</mat-expansion-panel>
<mat-expansion-panel >
<mat-expansion-panel-header>
<mat-panel-title>
USER LICENSE AGREEMENT
<ion-icon *ngIf="!user" color="warning" name="alert-circle"></ion-icon>
<ion-icon *ngIf="user" color="success" name="checkmark-circle"></ion-icon>
</mat-panel-title>
</mat-expansion-panel-header>
<h2>
AFROB END USER LICENSE AGREEMENT -
</h2>
<h6>
PLEASE READ THIS END-USER LICENSE AGREEMENT (“EULA”) CAREFULLY. BY DOWNLOADING,
INSTALLING OR OTHERWISE ACCESSING OR USING THE CARTALIST SOFTWARE, YOU AGREE TO THE
TERMS OF THIS EULA. IF YOU DO NOT AGREE TO THE TERMS OF THIS EULA, DO NOT DOWNLOAD,
INSTALL OR OTHERWISE ACCESS OR USE THE SOFTWARE. IN ADDITION, BY DOWNLOADING,
INSTALLING, COPYING, OR OTHERWISE USING UPDATES THAT YOU MAY RECEIVE FROM CARTALIST
UNDER THIS EULA, YOU AGREE TO BE BOUND BY THE ADDITIONAL LICENSE TERMS THAT ACCOMPANY
SUCH UPDATES. IF YOU DO NOT AGREE TO THE ADDITIONAL LICENSE TERMS THAT ACCOMPANY SUCH
UPDATES, YOU MAY NOT DOWNLOAD INSTALL, COPY, OR USE SUCH UPDATES.
</h6>
<ol>
<li>
<b>
General.
</b>
This EULA is a legal agreement between You (either an individual or a single entity) and AFROB
Pty Ltd or one of its product offerings (“Cartalist”). This EULA governs the Cartalist ecommerce Software
and the Cartalist integrated marketplace Software, as applicable, offered by AfroB as subscription service,
which includes computer and mobile device software (including online and electronic documentation)
and any associated media and printed materials. This EULA applies to license keys, updates, supplements,
add-on components, and govern any product support services related to the Software as described in this
EULA.
</li>
<li>
<b>
License Grant.
</b>
Under the terms and conditions of this EULA, Cartalist grants You the non-exclusive, nontransferable, non-sublicensable right to use Software, in object code form only during the term
</li>
<li>
<b>
Ownership.
</b>
AFROB or its suppliers own the title, copyright and other intellectual property rights in the
Software, and no title to the Software or such intellectual property rights is transferred to You. Thus, You
will not acquire any rights of ownership to the Software except the limited license to use the Software as
expressly set forth in this EULA, and AFROB and its licensors retain all other rights. You agree not to alter
or remove the copyright notice, or any other notices of proprietary rights, that appear on and in the
Software. All right, title and interest in the Software, and unless specified otherwise, in any ideas, know
how, work product and programs which are developed by AFROB in the course of providing any support
and maintenance or professional services, including any enhancements or modifications made to the
Software, shall at all times remain the property of AFROB.
</li>
<li>
<b>
License Restrictions
</b>
You may not modify or alter the Software in any way. You may not disassemble,
decompile or reverse engineer the Software in order to obtain the source code, which is a trade secret of
AFROB. You may not lease, sublicense or otherwise rent the Software and accompanying documentation
to any third-party. You are not authorized to use the Software to provide commercial IT services to any
third party, to provide commercial hosting or timesharing, or to sublicense, rent, or lease the Software.
You may not access or use the Software in any way that is adverse to AFROB's then-current acceptable
use policy. You must not perform any benchmark tests of the Software. You shall notify AFROB as soon as You
become aware of any unauthorized use of the Software by any person.
</li>
<li>
<b>
Term.
</b>
The Software is licensed to you on a perpetual basis, meaning that Your access to the Software
continues in perpetuity unless terminated as set forth in this EULA. In any case You may terminate by
cancelling payment of license subscription or by destroying the Software and accompanying
documentation and all copies thereof. This license will also terminate if You fail to comply with any term
or provision of this EULA. You agree upon such termination to cease all use of the Software and destroy
the Software and accompanying documentation and all copies thereof.
</li>
<li>
<b>
Consent to use of data
</b>
You agree that AFROB may collect and use technical information that is gathered
periodically to facilitate the provision of Software updates, product support and other services to You (if
any) related to the Cartalist Software, and to verify compliance with the terms of this EULA. AFROB may
use this information solely to improve its products or to provide services or technologies to You and will
not disclose this information in a form that personally identifies You
</li>
<li>
<b>
Software Maintenance
</b>
<ol type="a">
<li>
<a href="">AFROB</a> may, in its sole and exclusive discretion, elect to provide new product versions to keep the
Software up-to-date, service packs, and basic technical support (no SLAs) in the manner and at those
times as a new product versions, services packs, and basic technical support are provided to other
users of the Software. Any new product versions or service packs that are provided will be deemed
to be part of the Software and subject to the terms of this EULA. Basic technical support can be
accessed via the community user help desk available at the following links: <EMAIL> and
Cartalist FAQ.afrob.co.za
</li>
<li>
Updating the Software may require updates of software not covered by this EULA prior to
installation. Any such updates of the operating system and application software not specifically
covered by this EULA are Your responsibility and will not be provided by AFROB. AFROB's support
under this section are contingent upon your proper use of the Cartalist Software and your
compliance with the terms and conditions of this EULA at all times.
</li>
<li>
If AFROB elects to provide You with Technical Support, it will be your sole responsibility to: (i) comply
with all AFROB-specified operating and troubleshooting procedures and then notify AFROB
immediately of Cartalist Software malfunction and provide AFROB with complete information
thereof; (ii) provide for the security of your confidential information; (iii) establish and maintain
backup systems and procedures necessary to reconstruct lost or altered files, data or programs.
</li>
</ol>
</li>
<li>
<b>
Disclaimer of Warranties
</b>
<ol type="a">
<li>
CARTALIST SOFTWARE MAY BE INCOMPLETE AND MAY CONTAIN INACCURACIES OR ERRORS THAT
COULD CAUSE FAILURES OR LOSS OF DATA. YOU EXPRESSLY ACKNOWLEDGE AND AGREE THAT USE
OF THE SOFTWARE IS AT YOUR SOLE RISK AND THAT THE ENTIRE RISK AS TO SATISFACTORY QUALITY,
PERFORMANCE, ACCURACY AND EFFORT IS WITH YOU.
</li>
<li>
TO THE MAXIMUM EXTENT PERMITTED BY APPLICABLE LAW, THE SOFTWARE IS PROVIDED "AS IS",
WITH ALL FAULTS AND WITHOUT WARRANTY OF ANY KIND, AND AFROB AND ITS SUPPLIERS HEREBY
DISCLAIM ALL WARRANTIES AND CONDITIONS WITH RESPECT TO THE SOFTWARE, EITHER EXPRESS,
IMPLIED OR STATUTORY, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES AND/OR
CONDITIONS OF MERCHANTABILITY, OF SATISFACTORY QUALITY, OF FITNESS FOR A PARTICULAR
PURPOSE, OF ACCURACY, OF QUIET ENJOYMENT, AND NON-INFRINGEMENT OF THIRD PARTY
RIGHTS. AFROB DOES NOT WARRANT AGAINST INTERFERENCE WITH YOUR ENJOYMENT OF THE
SOFTWARE, THAT THE FUNCTIONS CONTAINED IN THE SOFTWARE WILL MEET YOUR
REQUIREMENTS, THAT THE OPERATION OF THE SOFTWARE WILL BE UNINTERRUPTED OR ERRORFREE, OR THAT DEFECTS IN THE SOFTWARE WILL BE CORRECTED. NO ORAL OR WRITTEN
INFORMATION OR ADVICE GIVEN BY AFROB OR A AFROB AUTHORIZED REPRESENTATIVE SHALL
CREATE A WARRANTY. SHOULD THE SOFTWARE PROVE DEFECTIVE, YOU ASSUME THE ENTIRE COST
OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
</li>
</ol>
</li>
<li>
<b>
Limitation of Liability.
</b>
TO THE MAXIMUM EXTENT PERMITTED BY LAW, IN NO EVENT SHALL AFROB OR
ITS SUPPLIERS BE LIABLE TO YOU FOR ANY DAMAGES, INCLUDING, WITHOUT LIMITATION, ANY SPECIAL,
DIRECT, INDIRECT, INCIDENTAL OR CONSEQUENTIAL DAMAGES, ARISING OUT OF OR IN CONNECTION
WITH THE USE OR PERFORMANCE OF THE SOFTWARE, INCLUDING WITHOUT LIMITATION, LOSS OF
AFROB End User License Agreement © 2020
PROFITS, BUSINESS, DATA, GOODWILL, OR ANTICIPATED SAVINGS, EVEN IF ADVISED OF THE POSSIBILITY
OF THOSE DAMAGES.
</li>
<li>
<b>
Export Restrictions.
</b>
The Software may be subject to export control laws and regulations of the Republic
of South Africa and applicable countries, including but not limited to regulations of the South African
Revenue Services Customs and Excise, prohibiting export of goods, directly or indirectly to “embargoed”
countries. You will ensure that a) the ultimate destination of the Software is not a destination in violation
of any such law or regulation; (b) the ultimate end-user is not a person or entity on the “denied persons
list” and (c) the end-user intended use does not violate any regulations regarding Diversion, Nuclear
Proliferation, Missile Technology or Chemical or Biological Weapons.
</li>
<li>
<b>
Excluded Services.
</b>
This EULA shall not apply to your use of software, cloud services, or professional
services (“Excluded Services”) that you purchase from AFROB. All such Excluded Services are governed
exclusively by the terms of the agreement that you have executed that covers such Excluded Services.
</li>
<li>
<b>
Miscellaneous.
</b>
<ol type="a">
<li>
Severability. If any provision of this EULA is invalid or unenforceable under applicable law, then it shall
be, to that extent, deemed omitted and the remaining provisions will continue in full force and effect.
</li>
<li>
Monitoring/Audit. AFROB shall have the right to monitor and audit your use of the Software upon
reasonable advance notice, by any means, including, without limitation, remote means, to verify your
compliance with the terms of this EULA. You shall be responsible for ensuring that your employees and
independent contractors comply with the terms of this EULA.
</li>
<li>
Governing Law. This EULA, and any dispute or claim arising out of or in connection with it or its subject
matter or formation (including non-contractual disputes or claims), shall be governed by and construed
in accordance with (a), if you downloaded the Software in the Botswana, the laws of Botswana; (b) if
you downloaded the Software in Lesotho, the laws of Lesotho; (c) if you downloaded the Software in
Zambia, the laws of Zambia; (d) if you downloaded the Software in Swaziland, the laws of Swaziland;
and (e) if you downloaded the Software in any other location, the laws of the Republic of South Africa,
in each case without reference to the principles of conflicts of law that would apply the substantive
laws of another jurisdiction. The application of the United Nations Convention on Contracts for the
International Sale of Goods to this EULA is hereby expressly excluded
</li>
<li>
Entire Agreement. This EULA sets forth the entire understanding and agreement between You and
AFROB.
</li>
<li>
Assignment: You shall not assign this EULA without the advance written consent of AFROB. However,
AFROB may assign this EULA in connection with a merger, reorganization, acquisition or other transfer
of all or substantially all of its assets. Any attempt to transfer or assign this EULA except as expressly
authorized under this section, will be null and void
</li>
<li>
Force Majeure: Neither Party will be responsible to the other for any failure or delay in its performance
due to acts of God or other unforeseen circumstances beyond the reasonable control of either party,
provided that such Part gives as reasonable as practicable written notice thereof to the other party
and uses its diligent efforts to resume performance.
</li>
<li>
Survival: All Sections of this EULA that by their nature must survive the termination or expiration of
this EULA, including without limitation Sections 4 (Ownership), 12 (Warranty Disclaimer) and 13
(Limitation of Liability) shall survive any termination or expiration of this EULA.
</li>
<li>
Marketing: You acknowledge and agree that AFROB may (i) include the name and logo of the entity
that You represent in a list of Cartalist's Customers to publicize the execution of this EULA, (ii) refer to
the name and logo of the entity that You represent on AFROB's website; and, (iii) refer to the name
and logo of the entity that You represent in marketing materials.
</li>
</ol>
</li>
</ol>
<ion-item>
<ion-label *ngIf="!user">Accept</ion-label>
<ion-label *ngIf="user" color="primary">Accepted</ion-label>
<ion-toggle *ngIf="header_state" (ionChange)="toast('user')" [(ngModel)]="user" ></ion-toggle>
</ion-item>
</mat-expansion-panel>
<mat-expansion-panel (opened)="panelOpenState = true"
(closed)="panelOpenState = false">
<mat-expansion-panel-header>
<mat-panel-title>
DATA PRIVACY POLICY
<ion-icon *ngIf="!privacy_value" color="warning" name="alert-circle"></ion-icon>
<ion-icon *ngIf="privacy_value" color="success" name="checkmark-circle"></ion-icon>
</mat-panel-title>
</mat-expansion-panel-header>
<h2>
AFROB DATA PRIVACY POLICY -
</h2>
<h6>
PLEASE READ THIS DATA POLICY CAREFULLY. BY DOWNLOADING, INSTALLING OR OTHERWISE
ACCESSING OR USING THE CARTALIST SOFTWARE, YOU AGREE TO THE TERMS OF THIS DATA POLICY. IF
YOU DO NOT AGREE TO THE TERMS OF THIS DATA POLICY, DO NOT DOWNLOAD, INSTALL OR OTHERWISE
ACCESS OR USE THE SOFTWARE. IN ADDITION, BY DOWNLOADING, INSTALLING, COPYING, OR
OTHERWISE USING UPDATES THAT YOU MAY RECEIVE FROM CARTALIST UNDER THIS DATA POLICY, YOU
AGREE TO BE BOUND BY THE ADDITIONAL DATA POLICY TERMS THAT ACCOMPANY SUCH UPDATES. IF
YOU DO NOT AGREE TO THE ADDITIONAL POLICY TERMS THAT ACCOMPANY SUCH UPDATES, YOU MAY
NOT DOWNLOAD INSTALL, COPY, OR USE SUCH UPDATES.
</h6>
<h2>
Data Policy
</h2>
<ion-text>
This policy describes the information we process to support Cartalist software and other software
and features offered by AFROB Pty Ltd. You can find additional tools and information in
the Cartalist Settings.
</ion-text>
<ol type="I">
<li>
<h6>
What kinds of information do we collect?
</h6>
<ion-text>
To provide the Cartalist Software, we must process information about you as an individual and or
business entity. The types of information we collect depend on how you use our software. You can
learn how to access and delete information we collect by visiting the Cartalist subscription setting
(licenses.afrob.co.za)
</ion-text>
<h6>
Things you and others do and provide.
</h6>
<ul>
<li>
<b>
Information and content you provide.
</b>
<ion-text>
We collect the content, communications and other
information you provide when you use our Cartalist software, including when you sign
up for an account, create or share content, and message or communicate with others.
This can include information in or about the content you provide (like metadata), such
as the location of a photo or the date a file was created. It can also include what you see
through features we provide, such as our your location, so we can do things like suggest
nearby customers to your store that might show interest in your store, or give you tips
on improving your online store. Our systems automatically process content and
communications you and others provide to analyze context and what is in them for the
purposes described below. Learn more about how you can control who can see your
store
</ion-text>
</li>
<li>
<ion-text>
Data with special protections: You can choose to provide information in your
Cartalist profile fields or about your shopping interest, history of purchases, shipping
address. This and other information (such as racial or ethnic origin, philosophical
beliefs, or trade union membership) could be subject to special protections under the
laws of your country.
</ion-text>
</li>
<li>
<b>
Networks and connections.
</b>
<ion-text>
We collect information about the people, vendors
accounts, hashtags you are connected to and how you interact with them across our
Software, such as stores you visit and communicate or transact with the most.
</ion-text>
</li>
<li>
<b>
Your usage.
</b>
<ion-text>
We collect information about how you use our Software, such as the types of
content you view or engage with; the features you use; the actions you take; the people
or vendor accounts you interact with; and the time, frequency and duration of your
activities. For example, we log when you're using and have last used our Software, and
what reviews, shared content and other content you view on our Cartalist Software.
</ion-text>
</li>
<li>
<b>
Information about transactions made on our Software.
</b>
<ion-text>
If you use our Software
for purchases or other financial transactions (such as when you make a purchase in a
vendor store or our marketplace), we collect information about the purchase or
transaction. This includes payment information, such as your credit or debit card number
and other card information; other account and authentication information; and billing,
shipping and contact details.
</ion-text>
</li>
<li>
<b>
Things others do and information they provide about you.
</b>
<ion-text>
We also receive and analyze
content, communications and information that other people provide when they use our
Software. This can include information about you, such as when others share or reply to
your review on a product or vendor store, send a message to you.
</ion-text>
</li>
</ul>
</li>
<li>
<h6>
Device Information
</h6>
<ion-text>
As described below, we collect information from and about the computers, phones, connected
TVs and other web-connected devices you use that integrate with our Software, and we combine
this information across different devices you use. For example, we use information collected about
your use of our Software on your phone to better personalize the content (including ads) or
features you see when you use our Software on another device, such as your laptop or tablet, or
to measure whether you took an action in response to an ad we showed you on your phone on a
different device.
Information we obtain from these devices includes:
</ion-text>
<ul>
<li>
<b>
Device attributes:
</b>
<ion-text>
information such as the operating system, hardware and software
versions, battery level, signal strength, available storage space, browser type, app and
file names and types, and plugins.
</ion-text>
</li>
<li>
<b>
Device operations:
</b>
<ion-text>
information about operations and behaviors performed on the device,
such as whether a window is foregrounded or backgrounded, or mouse movements
(which can help distinguish humans from bots).
</ion-text>
</li>
<li>
<b>
Identifiers:
</b>
<ion-text>
unique identifiers, device IDs, and other identifiers, such as from games, apps
or accounts you use, and Family Device IDs.
</ion-text>
</li>
<li>
<b>
Device signals:
</b>
<ion-text>
Bluetooth signals, and information about nearby Wi-Fi access points,
beacons, and cell towers.
</ion-text>
</li>
<li>
<b>
Data from device settings:
</b>
<ion-text>
information you allow us to receive through device settings you
turn on, such as access to your GPS location, camera or photos.
</ion-text>
</li>
<li>
<b>
Network and connections:
</b>
<ion-text>
information such as the name of your mobile operator or ISP,
language, time zone, mobile phone number, IP address, connection speed.
</ion-text>
</li>
<li>
<b>
Cookie data:
</b>
<ion-text>
data from cookies stored on your device, including cookie IDs and settings.
</ion-text>
</li>
</ul>
<h6>
Information from partners.
</h6>
<ion-text>
Advertisers, app developers, and publishers can send us information through the Cartalist
software they use, including our social plug-ins (such as the Like button), Cartalist Login, our APIs
and SDKs. These partners provide information about your activities off Cartalist—including
information about your device, websites you visit, purchases you make, the ads you see, and how
you use their services—whether or not you have a Cartalist account or are logged into Cartalist
software. For example, a vendor store developer could use our API to tell us what software you
are interested in buying, or a business could tell us about a purchase you made in its store. We
also receive information about your online and offline actions and purchases from third-party data
providers who have the rights to provide us with your information.
Partners receive your data when you visit or use their services or through third parties they work
with. We require each of these partners to have lawful rights to collect, use and share your data
before providing any data to us.
</ion-text>
</li>
<li>
<h6>
How do we use this information?
</h6>
<ion-text>
We use the information we have (subject to choices you make) as described below and to provide
and support the AFROB Software and related services described in the AFROB’s End User License
Agreement.
</ion-text>
<h6>
We seek to personalize and improve our Software.
</h6>
<ion-text>
We use the information we have to deliver our Software, including to personalize features and
content (including the Marketplace feed and ads) and make suggestions for you (such as
categories or software you may be interested in on and off our Software. To create personalized
Software that are unique and relevant to you, we use your purchase history, preferences, interests
and activities based on the data we collect and learn from you and others (including any data with
special protections you choose to provide); how you use and interact with our Software; and the
people, places, or things shown interest or transacted with in on and off our Softwar
</ion-text>
<b>
Exchange information on Cartalist Software and devices:
</b>
<ion-text>
We connect information about your
activities on different AFROB Software and devices to provide a more tailored and consistent
experience on all AFROB Software you use, wherever you use them. For example, we can suggest
that you try our multi-currency feature Cartalist that includes transactions outside your local
currency to increase your online sales territory. We can also make your experience more seamless,
for example, by automatically filling in your registration information (such as your phone number)
from one AFROB Product when you sign up for a use on a different product.
</ion-text>
<ul>
<li>
<b>
Location-related information:
</b>
<ion-text>
We use location-related information-such as your current
location, where you live, the places you like to go, and the businesses and people
you're near-to provide, personalize and improve our Software, including ads, for you and
others. Location-related information can be based on things like precise device location
(if you've allowed us to collect it), IP addresses, and information from your and others'
use of AFROB Software (such as delivery management).
</ion-text>
</li>
<li>
<b>
Product research and development:
</b>
<ion-text>
We use the information we have to develop, test and
improve our Software, including by conducting surveys and research, and testing and
troubleshooting new software and features
</ion-text>
</li>
<li>
<b>
Face recognition:
</b>
<ion-text>
If you have it turned on, we use face recognition technology to recognize
you and verify biometric access control. The face-recognition templates we create may
constitute data with special protections under the laws of your country. If we introduce
face-recognition technology to your Cartalist software experience, we will let you know
first, and you will have control over whether we use this technology for you.
</ion-text>
</li>
<li>
<b>
Ads and other sponsored content:
</b>
<ion-text>
We use the information we have about you-including
information about your interests, actions and connections-to select and personalize ads,
offers and other sponsored content that we show you.
</ion-text>
</li>
</ul>
<h6>
Our analytics, and other business services
</h6>
<ion-text>
We use the information we have (including your activity off our Software, such as the websites
you visit and ads you see) to help advertisers and other partners measure the effectiveness and
distribution of their ads and services, and understand the types of people who use their services
and how people interact with their websites, apps, and services
</ion-text>
<h6>
We care about safety, integrity and security
</h6>
<ion-text>
We use the information we have to verify accounts and activity, combat harmful conduct, detect
and prevent spam and other bad experiences, maintain the integrity of our Software, and promote
safety and security on and off of AFROB’S Software. For example, we use data we have to
investigate suspicious activity or violations of our terms or policies.
</ion-text>
<h6>
Update Correspondences to you
</h6>
<ion-text>
We use the information we have to send you marketing communications, communicate with you
about our Software, and let you know about our policies and terms. We also use your information
to respond to you when you contact us.
</ion-text>
<h6>
Research and innovate for social good.
</h6>
<ion-text>
We use the information we have (including from research partners we collaborate with) to
conduct and support research and innovation on topics that promote customer experience and
technological advancement. For example, we analyze information we have about shoppers’
patterns during different seasons to advice on business trends.
</ion-text>
</li>
<li>
<h6>
How is this information shared?
</h6>
<ion-text>
Your information is shared with others in the following ways:
</ion-text>
</li>
<li>
<h6>
Sharing on Cartalist Software
</h6>
<b>
People and accounts you share and communicate with
</b><br>
<ion-text>
When you share and communicate using our Software, you choose the audience for what you
share. For example, when you promote a product on Cartalist, you select the audience for the
post, such as a group, region, or a customized list of people.
Subscribers can also use our Software to review and share products or purchase-history content
with the audience they choose. For example, people can share a URL link to your product
on their social media platforms.
</ion-text>
<b>
Apps, websites, and third-party integrations on or using our Software.
</b><hr>
<ion-text>
When you choose to use third-party apps, websites, or other services that use, or are integrated
with, our Software, they can receive information about what you purchase, products and reviews.
Also, when you download or use such third-party services, they can access your public profile on
Cartalist, and any information that you share with them. Information collected by these third-party
services is subject to their own terms and policies, not this one.
Note: We are in the process of restricting developers’ data access even further to help prevent
abuse. For example, we will remove developers' access to your Cartalist data if you haven't used
their app in 3 months, and we are changing Login, so that in the next version, we will reduce the
data that an app can request without app review to include only name, username and bio, profile
photo and email address. Requesting any other data will require our approval.
</ion-text>
<b>
New owner
</b><br>
<ion-text>
If the ownership or control of all or part of our Software or their assets changes, we may transfer
your information to the new owner.
</ion-text>
<b>
Sharing with Third-Party Partners
</b><br>
<ion-text>
We work with third-party partners who help us provide and improve our Software or who use
Cartalist to grow their businesses, which makes it possible to operate our companies and provide
essential services to our subscribers. We don't sell any of your information to anyone, and we
never will. We also impose strict restrictions on how our partners can use and disclose the data
we provide. Here are the types of third parties we share information with:
</ion-text>
<b>
Partners who use our analytics services
</b><br>
<ion-text>
We provide aggregated statistics and insights that help people and businesses understand how
people are engaging with their listings, store pages, images, blogs, campaigns and other content
on and off the Cartalist Software. For example, store page admins receive information about the
number of people or accounts who visited, reacted to, or commented on their products, as well
as aggregate demographic and other information that helps them understand interactions with
their store page or account.
</ion-text>
<b>
Advertisers.
</b>
<br>
<ion-text>
We provide advertisers with reports about the kinds of people seeing their ads and how their ads
are performing, but we don't share information that personally identifies you (information such
as your name or email address that by itself can be used to contact you or identifies who you are)
unless you give us permission. For example, we provide general demographic and interest
information to advertisers (for example, that an ad was seen by a woman between the ages of 25
and 34 who lives in Johannesburg and likes software engineering) to help them better understand
their audience. We also confirm which Cartalist ads led you to make a purchase or take an action
with an advertiser
</ion-text>
<br>
<b>
Measurement partners.
</b>
<br>
<ion-text>
We share information about you with companies that aggregate it to provide analytics and
measurement reports to our partners.
</ion-text>
<br>
<b>
Partners offering goods and services in our Software.
</b>
<br>
<ion-text>
When you subscribe to receive premium content, or buy something from a seller in our Software,
the content creator or seller can receive your public information and other information you share
with them, as well as the information needed to complete the transaction, including shipping and
contact details.
</ion-text>
<br>
<b>
Vendors and service providers.
</b><br>
<ion-text>
We provide information and content to vendors and service providers who support our business,
such as by providing technical infrastructure services, analyzing how our Software are used,
providing customer service, facilitating payments, or conducting surveys.
</ion-text>
<br>
<b>
Researchers and academics.
</b><br>
<ion-text>
We also provide information and content to research partners to conduct research that advances
innovation that support our business or mission and enhances discovery and innovation on topics
of general social welfare, technological advancement, public interest and well-being.
</ion-text>
<br>
<b>
Law enforcement or legal requests.
</b>
<br>
<ion-text>
We share information with law enforcement or in response to legal requests in the circumstances
outlined below.
</ion-text>
</li>
<li>
<h6> How can I manage or delete information about me? </h6>
<ion-text>
We provide you with the ability to access, rectify and port your data.
We store data until it is no longer necessary to provide our services, or until your account is
deleted. In the case that there are existing transactions we will store the data for up to 5 years for
tax record purposes. This is a case-by-case determination that depends on things like the nature
of the data, why it is collected and processed, and relevant legal or operational retention needs.
</ion-text>
</li>
<li>
<h6>
How do we respond to legal requests or prevent harm?
</h6>
<ion-text>
We access, preserve, and share your information with regulators, law enforcement or others:
</ion-text>
<ul>
<li>
<ion-text>In response to a legal request (like a search warrant, court order or subpoena) if we have a good
faith belief that the law requires us to do so. This may include responding to legal requests from
jurisdictions outside of the United States when we have a good-faith belief that the response is
required by law in that jurisdiction, affects users in that jurisdiction, and is consistent with
internationally recognized standards</ion-text>
</li>
</ul>
<ion-text>
Information we receive about you (including financial transaction data related to purchases made
with Cartalist) can be accessed and preserved for an extended period when it is the subject of a
legal request or obligation, governmental investigation, or investigations of possible violations of
our terms or policies, or otherwise to prevent harm. We also retain information from accounts
disabled for terms violations for at least a year to prevent repeat abuse or other term violations.
</ion-text>
<h6>
How do we operate and transfer data as part of our global services?
</h6>
<ion-text>
We share information globally, both internally within the AfroB company, and externally with our
partners and with those you connect and share with around the world in accordance with this
policy. Your information may, for example, be transferred or transmitted to, or stored and
processed in the Republic of South Africa or other countries outside of where you live for the
purposes as described in this policy. These data transfers are necessary to provide the services set
forth in the AfroB End User License Agreement (EULA) and to globally operate and provide our
Software to you. We utilize standard contract clauses, rely on the African Union Convention On
Cyber Security And Personal Data Protection about certain countries, as applicable, and obtain
your consent for these data transfers to the Republic of South Africa and other countries.
</ion-text>
</li>
<li>
<h6>
How will we notify you of changes to this policy?
</h6>
<ion-text>
We'll notify you before we make changes to this policy and give you the opportunity to review the
revised policy before you choose to continue using our Software.
</ion-text>
</li>
</ol>
<ion-item>
<ion-label *ngIf="!privacy_value">Accept</ion-label>
<ion-label *ngIf="privacy_value" color="primary">Accepted</ion-label>
<ion-toggle *ngIf="header_state" (ionChange)="toast('privacy')" [(ngModel)]="privacy_value" ></ion-toggle>
</ion-item>
</mat-expansion-panel>
</mat-accordion>
</ion-card>
<div *ngIf="header_state" class="center">
<ion-button fill="solid" shape="round" (click)="next()">
Continue
</ion-button>
</div>
</ion-content>
<file_sep>import { Injectable } from "@angular/core";
import { HttpClient } from "@angular/common/http";
import { EnvService } from "src/app/env.service";
@Injectable({
  providedIn: "root",
})
export class ReceiptApiService {
  /** Root path shared by every receipt endpoint, taken from the env config. */
  private readonly _receiptApi = `${this.env.apiUrl}/api/receipt`;

  _urlUserReceipts = `${this._receiptApi}/get_user_receipts`;
  _urlStoreReceipts = `${this._receiptApi}/get_store_receipts`;
  _urlStoreReceiptsById = `${this._receiptApi}/get_store_receiptsById`;
  _urlOrderReceipt = `${this._receiptApi}/get_order_receipt`;
  _urlUpdateReceipt = `${this._receiptApi}/update_receipt`;
  _urlPayfastReceipts = `${this._receiptApi}/get_payfast_receipt`;

  constructor(private _http: HttpClient, private env: EnvService) {}

  /** GET receipts belonging to the signed-in user. */
  getUserReceipts() {
    return this._http.get<any>(this._urlUserReceipts);
  }

  /** GET receipts for the caller's own store. */
  getStoreReceipts() {
    return this._http.get<any>(this._urlStoreReceipts);
  }

  /** POST: receipts for an explicit store id (the body carries the id). */
  getStoreReceiptsById(id) {
    return this._http.post<any>(this._urlStoreReceiptsById, id);
  }

  /** POST: receipt attached to a single order. */
  getOrderReceipt(data) {
    return this._http.post<any>(this._urlOrderReceipt, data);
  }

  /** PUT: persist changes to an existing receipt. */
  updateReceipt(data) {
    return this._http.put<any>(this._urlUpdateReceipt, data);
  }

  /** GET Payfast receipts — only used by Cartalist. */
  getPayfastReceipts() {
    return this._http.get<any>(this._urlPayfastReceipts);
  }
}
<file_sep>import { Component, OnInit } from "@angular/core";
import { ProductFacadeService } from "src/app/core-modules/services/products/product-facade/product-facade.service";
import { ProductStateService } from "src/app/core-modules/services/products/product-state/product-state.service";
import { Router } from "@angular/router";
import { HeaderStateService } from "src/app/core-modules/services/header/header-state/header-state.service";
import { CartFacadeService } from "src/app/core-modules/services/cart/cart-facade/cart-facade.service";
import { VoucherFacadeService } from "src/app/core-modules/services/vouchers/voucher-facade/voucher-facade.service";
// Landing-page section that shows products currently on sale
// (sourced from the voucher facade).
@Component({
  selector: "app-sales",
  templateUrl: "./sales.component.html",
  styleUrls: ["./sales.component.scss"],
})
export class SalesComponent implements OnInit {
  // NOTE(review): not read in this class — presumably bound in the template.
  public reset = 1;
  // Stream of voucher/sale products rendered by the template.
  public products$;
  // NOTE(review): only written here (seeMore bumps 4 -> 10) — presumably
  // the template uses it as a display limit; confirm.
  public more = 4;
  // Count of sale products, set from the latest products$ emission.
  public sale_length;
  constructor(
    private router: Router,
    private cartFacade: CartFacadeService,
    private headerState: HeaderStateService,
    private _productFacade: ProductFacadeService,
    private productState: ProductStateService,
    private voucherFacade: VoucherFacadeService
  ) {}
  ngOnInit() {
    //this.products$ = this.productState.getSegmentProducts("sales_products");
    this.products$ = this.voucherFacade.getVouchersProducts();
    // Track how many sale products exist for the template header.
    this.products$.subscribe((res) => {
      console.log(res.length)
      this.sale_length = res.length;
    });
  }
  // Push all sale products into shared product state and navigate to the
  // "all products" listing labeled "Products on sale".
  loadSalesProducts() {
    this.products$.subscribe(
      (res) => {
        localStorage.setItem("all_p", "Products on sale");
        this.productState.setSearchingProducts(res);
        this.productState.setMainProducts(res);
        this.router.navigate(["/landing/all-products"]);
      },
      (err) => {
        console.log(err);
      }
    );
  }
  // Open the detail page for the clicked product.
  viewDetailPage(product) {
    this.headerState.updateHeaderStatus("product_detail");
    //navigate
    this.router.navigate([`/user/landing/product-detail`, product._id]);
  }
  // Add the clicked product to the shopping cart.
  addToCart(product) {
    this.cartFacade.addToCart(product);
  }
  // Expand the visible sale list.
  seeMore() {
    this.more = 10;
  }
  // Percentage discount of `price` relative to `original`, truncated to an
  // integer (e.g. 80 from 100 -> 20).
  calculatePerc(price, original) {
    let i = ((original - price) / original) * 100
    let percent = Math.trunc(i)
    return percent
  }
}
<file_sep>import { NgModule } from "@angular/core";
import { CommonModule } from "@angular/common";
import { FormsModule } from "@angular/forms";
import { Routes, RouterModule } from "@angular/router";
import { IonicModule } from "@ionic/angular";
import { MatGoogleMapsAutocompleteModule } from "@angular-material-extensions/google-maps-autocomplete";
import { StoreRegisterPage } from "./store-register.page";
import { ReactiveFormsModule } from "@angular/forms";
// Routes for the lazily-loaded store-register feature: the empty path
// renders the registration page itself.
const routes: Routes = [
  {
    path: "",
    component: StoreRegisterPage,
  },
];
// Feature module for the store registration page. Bundles Ionic, both
// template-driven and reactive forms, and the Google Maps autocomplete
// module (presumably used by the registration template).
@NgModule({
  imports: [
    CommonModule,
    FormsModule,
    IonicModule,
    RouterModule.forChild(routes),
    MatGoogleMapsAutocompleteModule,
    ReactiveFormsModule,
  ],
  declarations: [StoreRegisterPage],
})
export class StoreRegisterPageModule {}
<file_sep>import { Component, OnInit, ViewChild } from "@angular/core";
import {
FormGroup,
FormControl,
FormBuilder,
Validators,
} from "@angular/forms";
import { Router } from "@angular/router";
import { AuthApiService } from "src/app/core-modules/services/auth/auth-api/auth-api.service";
import { PasswordValidator } from "src/app/modules/auth/shared/password.validator";
import { IonSlides } from "@ionic/angular";
import { ToastController } from "@ionic/angular";
import { UserFacadeService } from "src/app/core-modules/services/profile/profile-facade/profile-facade.service";
import {BreakpointObserver, Breakpoints} from '@angular/cdk/layout';
// Admin flow for inviting an additional user to a store: collects an email,
// user type, role and module access across ion-slides, then registers the
// account via the auth API.
@Component({
  selector: "app-ad-user",
  templateUrl: "./ad-user.component.html",
  styleUrls: ["./ad-user.component.scss"],
})
export class AdUserComponent implements OnInit {
  // Handle on the ion-slides element driving the multi-step form.
  @ViewChild("userSlides") slides: IonSlides;
  public Submit_load;
  public submitted: boolean;
  //initialize new empty form-group of type FormGroup
  private slideOne: FormGroup;
  // Module-access selections made in the template (see onChange).
  public modules;
  // "Handset" | "Tablet" | "Web", from the CDK breakpoint observer.
  device_screen;
  // Desktop slide navigation state (see desktopNext / slidesBack).
  showNext = true;
  showBack = false;
  constructor(
    private formBuilder: FormBuilder,
    private _authService: AuthApiService,
    private router: Router,
    public toastController: ToastController,
    breakpointObserver: BreakpointObserver,
    private userFacade: UserFacadeService
  ) {
    // Track the device class so the template can adapt its layout.
    breakpointObserver.observe([
      Breakpoints.Handset
    ]).subscribe(result => {
      if (result.matches) {
        this.device_screen = "Handset"
      }
    });
    breakpointObserver.observe([
      Breakpoints.Tablet
    ]).subscribe(result => {
      if (result.matches) {
        this.device_screen = "Tablet"
      }
    });
    breakpointObserver.observe([
      Breakpoints.Web
    ]).subscribe(result => {
      if (result.matches) {
        this.device_screen = "Web"
      }
    });
  }
  ngOnInit() {
    //create new form-group instance
    this.slideOne = this.formBuilder.group({
      //create instances of form-control
      // Email is required, capped at 50 chars, and must match the pattern.
      email: [
        "",
        Validators.compose([
          Validators.maxLength(50),
          Validators.pattern(
            "^[_A-Za-z0-9-\\+]+(\\.[_A-Za-z0-9-]+)*@[A-Za-z0-9-]+(\\.[A-Za-z0-9]+)*(\\.[A-Za-z]{2,})$"
          ),
          Validators.required,
        ]),
      ],
      user_type: [""],
      role: [""],
    });
  }
  // Change handler for the module selector: capture the chosen modules.
  onChange(event) {
    this.modules = event.detail.value;
  }
  // Remove a single module from the current selection.
  deleteModule(data) {
    let modules = this.modules.filter((module) => {
      return module !== data;
    });
    this.modules = modules;
  }
  // Build the registration payload for the current store and submit it.
  onSubmit() {
    let store = this.userFacade.getCurrentStore();
    let data = {
      email: this.slideOne.value.email,
      user_type: this.slideOne.value.user_type,
      role: this.slideOne.value.role,
      module_access: this.modules,
      // NOTE(review): placeholder initial password — confirm the server
      // replaces it or forces a reset on first login.
      password: "<PASSWORD>",
      store_id: store._id,
    };
    //subscribe to api ox2
    this._authService.adminRegister(data).subscribe(
      (res) => {
        if (res.store) {
          // Registration returned an updated store: refresh local state.
          this.userFacade.setCurrentStore(res.store);
          this.userFacade.loadUser();
          this.router.navigate(["/admin-store/store"])
        }
        this.presentToast(res.msg)
      },
      (err) => {
        console.log(err.err);
        this.presentToast(err.msg);
      }
    );
  }
  // Advance to the next slide (mobile swipe flow).
  next() {
    this.slides.slideNext();
  }
  // Advance one slide on desktop, where swiping is kept locked.
  desktopNext( ){
    this.slides.lockSwipes(false);
    this.slides.slideNext();
    this.showNext =false
    this.showBack = true
    this.slides.lockSwipes(true);
  }
  // Return to the previous slide on desktop.
  slidesBack(){
    this.slides.lockSwipes(false);
    this.slides.slidePrev();
    this.showNext =true
    this.showBack = false
    this.slides.lockSwipes(true);
  }
  // Bottom toast used for API feedback messages.
  async presentToast(data) {
    const toast = await this.toastController.create({
      message: data,
      duration: 2000,
      position: "bottom",
    });
    toast.present();
  }
}
<file_sep>import { NgModule } from "@angular/core";
import { PreloadAllModules, RouterModule, Routes } from "@angular/router";
import { LandingPage } from "./landing.page";
// Child routes of the user "landing" shell. Each feature page is lazily
// loaded; empty paths redirect to the home page.
const routes: Routes = [
  {
    path: "landing",
    component: LandingPage,
    children: [
      {
        path: "home",
        children: [
          {
            path: "",
            loadChildren: () =>
              import("src/app/modules/user/pages/home/home.module").then(
                (m) => m.HomePageModule
              ),
          },
        ],
      },
      {
        // Product detail, keyed by product id.
        path: "product-detail/:id",
        children: [
          {
            path: "",
            loadChildren: () =>
              import(
                "src/app/modules/user/pages/product-detail/product-detail.module"
              ).then((m) => m.ProductDetailPageModule),
          },
        ],
      },
      {
        path: "all-products",
        children: [
          {
            path: "",
            loadChildren: () =>
              import(
                "src/app/modules/user/pages/see-all-products-home/see-all-products-home.module"
              ).then((m) => m.SeeAllProductsHomePageModule),
          },
        ],
      },
      {
        // Default child route: go home.
        path: "",
        redirectTo: "/user/landing/home",
        pathMatch: "full",
      },
    ],
  },
  {
    // Default top-level route: go home.
    path: "",
    redirectTo: "/user/landing/home",
    pathMatch: "full",
  },
];
// Router module wiring the landing routes into the parent router outlet.
@NgModule({
  imports: [RouterModule.forChild(routes)],
  exports: [RouterModule],
})
export class LandingRoutingModule {}
<file_sep>const express = require("express");
const router = express.Router();
//this file allow images upload folder to push to git
//do not delete
module.exports = router;
<file_sep>import { Component, OnInit, Input, Output, EventEmitter } from "@angular/core";
import { CategoryFacadeService } from "src/app/core-modules/services/categories/category-facade/category-facade.service";
import { Observable, BehaviorSubject } from "rxjs";
import { FormsModule } from "@angular/forms";
import { LoadingController } from "@ionic/angular";
import { Papa } from "ngx-papaparse";
import { ProductApiService } from "src/app/core-modules/services/products/product-api/product-api.service";
// Category widget: exposes the category stream to its template and emits
// the chosen category to the parent. The CSV/papaparse fields appear to
// support bulk category loading — NOTE(review): no CSV parsing happens in
// this class; confirm where these fields are used.
@Component({
  selector: "app-category-comp",
  templateUrl: "./category.component.html",
  styleUrls: ["./category.component.scss"],
})
export class CategoryComponent implements OnInit {
  public productsSegment;
  // Emits the category selected in the template to the parent component.
  @Output() shareCategory = new EventEmitter();
  //observables
  categories$;
  isUpdating$;
  //categories load
  jsonData: any[] = [];
  csvData: any[] = [];
  headerRow: any[] = [];
  uploaded: boolean = false;
  localUrl;
  CategoryReady: boolean;
  constructor(
    private categoryFacade: CategoryFacadeService,
    public loadingController: LoadingController,
    private papa: Papa
  ) {}
  ngOnInit() {
    //getCategories
    this.categories$ = this.categoryFacade.getCategories$();
  }
  // Show a short-lived "Please wait..." loading spinner (auto-dismisses).
  async presentLoading() {
    const loading = await this.loadingController.create({
      message: "Please wait...",
      duration: 500,
    });
    await loading.present();
    const { role, data } = await loading.onDidDismiss();
  }
}
<file_sep>import { Component, OnInit } from "@angular/core";
import { ActivatedRoute, Router } from "@angular/router";
import { ProductsFacadeService } from "src/app/core-modules/services/admin/products/products-facade/products-facade.service";
import { ProfileApiService } from "src/app/core-modules/services/shared/profile/profile-api/profile-api.service";
import { UserFacadeService } from "src/app/core-modules/services/shared/profile/profile-facade/profile-facade.service";
import { CartFacadeService } from "src/app/core-modules/services/user/cart/cart-facade/cart-facade.service";
import { CartService } from "src/app/core-modules/services/user/cart/cart-state/cart.service";
import { HeaderStateService } from "src/app/core-modules/services/utils/header-state/header-state.service";
// Public profile page for a vendor/store, addressed by the :id route param.
@Component({
  selector: "app-vendor-profile",
  templateUrl: "./vendor-profile.page.html",
  styleUrls: ["./vendor-profile.page.scss"],
})
export class VendorProfilePage implements OnInit {
  // Store id taken from the route snapshot.
  public id: string;
  constructor(
    private route: ActivatedRoute,
    private userFacade: UserFacadeService,
    private _profileApi: ProfileApiService,
    private cartFacade: CartFacadeService,
    private _productFacade: ProductsFacadeService,
    private headerState: HeaderStateService,
    private cartService: CartService,
    private router: Router
  ) {}
  ngOnInit() {
    this.id = this.route.snapshot.paramMap.get("id");
    //get all vendor products and subscribe
    this._productFacade.loadStoreProducts(this.id);
    //load profile
    this.userFacade.loadStoreProfile(this.id);
  }
  // Navigate back to the user landing page.
  goHome() {
    this.router.navigate(["/user/landing"]);
  }
}
<file_sep>import { TestBed } from "@angular/core/testing";
import { ProductControllerService } from "./product-controller.service";
// Smoke test: the service can be created by Angular's DI container.
describe("ProductControllerService", () => {
  let service: ProductControllerService;
  beforeEach(() => {
    // Fresh testing module (no overrides) before each spec.
    TestBed.configureTestingModule({});
    service = TestBed.inject(ProductControllerService);
  });
  it("should be created", () => {
    expect(service).toBeTruthy();
  });
});
<file_sep>import { Component, OnInit } from "@angular/core";
import { ToastController } from "@ionic/angular";
import { Router } from "@angular/router";
import { HeaderStateService } from "src/app/core-modules/services/utils/header-state/header-state.service";
// Page that displays the EULA and data-privacy policy and records the
// user's acceptance of each before allowing them to continue.
@Component({
  selector: "app-policies",
  templateUrl: "./policies.page.html",
  styleUrls: ["./policies.page.scss"],
})
export class PoliciesPage implements OnInit {
  // Toggle state: true once the privacy policy has been accepted.
  public privacy_value;
  // Toggle state: true once the end-user license agreement has been accepted.
  public user;
  // Whether the acceptance header/toggles are shown (from HeaderStateService).
  public header_state;
  // Value forwarded from navigation state and passed on to the welcome page.
  // NOTE(review): presumably a "register store" flag — confirm with callers.
  register_store;
  constructor(
    public toastController: ToastController,
    private headerState: HeaderStateService,
    private router: Router
  ) {}
  ngOnInit() {
    this.header_state = this.headerState.policy_header;
    console.log(this.header_state);
    // history.state carries the value set by the navigating page.
    this.register_store = history.state.rs;
  }
  // Show an acceptance-confirmation toast with a "Confirm" button.
  async presentToastWithOptions(msg, data) {
    const toast = await this.toastController.create({
      header: "Acceptance Confirmation!",
      message: msg,
      position: "bottom",
      buttons: [
        {
          text: "Confirm",
          role: "cancel",
          handler: () => {
            console.log("confirmed clicked");
          },
        },
      ],
    });
    toast.present();
  }
  // Toggle handler: announce acceptance/rejection of the EULA ("user")
  // or the privacy policy ("privacy").
  toast(data) {
    switch (data) {
      case "user": {
        if (this.user == true) {
          let msg = "You have Accepted the AFROB END USER LICENSE AGREEMENT";
          this.presentToastWithOptions(msg, data);
        } else if (this.user == false) {
          let msg =
            "You have Not Accepted the AFROB END USER LICENSE AGREEMENT";
          this.presentToastWithOptions(msg, data);
        }
        break;
      }
      case "privacy": {
        if (data == "privacy" && this.privacy_value == true) {
          let msg = "You have Accepted the DATA PRIVACY POLICY ";
          this.presentToastWithOptions(msg, data);
        } else if (data == "privacy" && this.privacy_value == false) {
          let msg = "You have Not Accepted the DATA PRIVACY POLICY ";
          this.presentToastWithOptions(msg, data);
        }
        break;
      }
    }
  }
  // Plain informational toast.
  async presentToast(msg) {
    const toast = await this.toastController.create({
      message: msg,
      duration: 3500,
    });
    toast.present();
  }
  // Continue only when both documents are accepted; otherwise prompt.
  next() {
    if (this.privacy_value && this.user) {
      this.router.navigate(["/auth/user/welcome"], {
        state: { rs: this.register_store },
      });
      this.headerState.policy_header = false;
    } else {
      let msg =
        " Accept the USER LICENSE AGREEMENT and the DATA PRIVACY POLICY first";
      this.presentToast(msg);
    }
  }
}
<file_sep>import { NgModule } from "@angular/core";
import { CommonModule } from "@angular/common";
import { RouterModule } from "@angular/router";
import { IonicModule } from "@ionic/angular";
import { FormsModule } from "@angular/forms";
import { ReactiveFormsModule } from "@angular/forms";
import { MatExpansionModule } from "@angular/material/expansion";
import { ProductsComponent } from "./products/products.component";
import { CategoryComponent } from "./category/category.component";
import { AddProductComponent } from "./products/add-product/add-product.component";
import { InventoryComponent } from "./inventory/inventory.component";
import { ProductViewModalComponent } from "../modals/product-view-modal/product-view-modal.component";
import { CsvComponent } from "./csv/csv.component";
import { ImagesComponent } from "./products/images/images.component";
import { ImageCropperModule } from "ngx-image-cropper";
import {ProductsPopoverComponent} from "./products/products-popover/products-popover.component";
import {AutosizeModule} from 'ngx-autosize';
// Shared module bundling the product-management components (category,
// product CRUD, inventory, CSV import, images, popover/modal) so feature
// pages can import them as one group.
@NgModule({
  declarations: [
    CategoryComponent,
    ProductsComponent,
    AddProductComponent,
    InventoryComponent,
    CsvComponent,
    ImagesComponent,
    ProductViewModalComponent,
    ProductsPopoverComponent
  ],
  imports: [
    CommonModule,
    RouterModule,
    IonicModule,
    FormsModule,
    ReactiveFormsModule,
    MatExpansionModule,
    ImageCropperModule,
    AutosizeModule
  ],
  exports: [
    CategoryComponent,
    ProductsComponent,
    AddProductComponent,
    InventoryComponent,
    CsvComponent,
    ProductViewModalComponent,
    ImagesComponent,
    ProductsPopoverComponent
  ],
  entryComponents: [],
})
export class ProductsComponentsModule {}
<file_sep>import { Component, OnInit } from "@angular/core";
import { ProductFacadeService } from "src/app/core-modules/services/products/product-facade/product-facade.service";
import { VoucherFacadeService } from "src/app/core-modules/services/vouchers/voucher-facade/voucher-facade.service";
import { Router } from "@angular/router";
import { ProductStateService } from "src/app/core-modules/services/products/product-state/product-state.service";
import { HeaderStateService } from "src/app/core-modules/services/header/header-state/header-state.service";
import { CartFacadeService } from "src/app/core-modules/services/cart/cart-facade/cart-facade.service";
import { PickerController } from "@ionic/angular";
import { PickerOptions } from "@ionic/core";
import { async } from '@angular/core/testing';
// Landing-page section listing products with active promotions and letting
// the user filter them by promotion type via an ion-picker.
@Component({
  selector: "app-promotions",
  templateUrl: "./promotions.component.html",
  styleUrls: ["./promotions.component.scss"],
})
export class PromotionsComponent implements OnInit {
  // Stream of voucher products rendered by the template.
  public products$;
  // Currently selected promotion type (a sale_type value), or falsy for none.
  public promo_type;
  // Last emission of products$, cached for building picker options.
  public products_with_promos;
  // Display labels for the supported promotion types (template-bound).
  public promotions = ['Buy one get one free',
    'Discounted price',
    'Bulk Volume Discount', 'Combo Sales']

  // Single source of truth mapping each display label to the sale_type
  // value stored on products — replaces the duplicated switch/filter blocks.
  private readonly saleTypeByLabel = {
    "Buy one get one free": "buy1get1free",
    "Discounted price": "itemDiscount",
    "Bulk Volume Discount": "volume",
    "Combo Sales": "combo",
  };

  constructor(
    private _productFacade: ProductFacadeService,
    private voucherFacade: VoucherFacadeService,
    private productState: ProductStateService,
    private router: Router,
    private cartFacade: CartFacadeService,
    private headerState: HeaderStateService,
    private pickerController: PickerController
  ) {}

  // Open an ion-picker listing only the promotion types that have products;
  // the dismissed selection is stored in promo_type.
  async showPicker() {
    let options: PickerOptions = {
      buttons: [
        {
          text: "Cancel",
          role: 'cancel'
        },
        {
          text: 'Ok',
          handler: (value: any) => {
            console.log(value);
          }
        }
      ],
      columns: [{
        name: 'Promotions',
        options: this.getColumnOptions()
      }]
    };
    let picker = await this.pickerController.create(options);
    picker.present()
    picker.onDidDismiss().then(async data => {
      let col = await picker.getColumn('Promotions');
      this.promo_type = col.options[col.selectedIndex].value
    })
  }

  // Build one picker option per promotion type that at least one product
  // currently uses.
  getColumnOptions() {
    // NOTE(review): relies on products$ emitting synchronously on subscribe
    // (e.g. a BehaviorSubject) so products_with_promos is populated before
    // the loop below runs — confirm against VoucherFacadeService.
    this.products$.subscribe(
      (res) => {
        this.products_with_promos = res;
      },
      (err) => {
        console.log(err);
      }
    );
    const options = [];
    for (const label of this.promotions) {
      const saleType = this.saleTypeByLabel[label];
      const inUse = this.products_with_promos.some(
        (product) => product.sale_type == saleType
      );
      if (inUse) {
        options.push({ text: label, value: saleType });
      }
    }
    return options;
  }

  ngOnInit() {
    this.products$ = this.voucherFacade.getVouchersProducts();
  }

  // Template hook: select a promotion type directly.
  selectPromo(data) {
    this.promo_type = data;
  }

  // Template hook: clear the selected promotion type.
  onback() {
    this.promo_type = false;
  }

  // Push the current promo products into shared state and show them all.
  loadPromoProducts() {
    this.products$.subscribe(
      (res) => {
        this.productState.setSearchingProducts(res);
        this.productState.setMainProducts(res);
        this.router.navigate(["/landing/all-products"]);
      },
      (err) => {
        console.log(err);
      }
    );
  }

  // Navigate to the product detail page for the given product.
  viewDetailPage(product) {
    this.headerState.updateHeaderStatus("product_detail");
    //navigate
    this.router.navigate([`/user/landing/product-detail`, product._id]);
  }

  // Add the given product to the cart.
  addToCart(product) {
    this.cartFacade.addToCart(product);
  }
}
<file_sep>import { NgModule } from '@angular/core';
import { Routes, RouterModule } from '@angular/router';
import { SlidesLayoutPage } from './slides-layout.page';
// Routes for the slides-based "add" flows (product / promotion / user),
// each lazily loaded inside the SlidesLayoutPage shell.
const routes: Routes = [
  {
    path: 'add',
    component: SlidesLayoutPage,
    children: [
      {
        path: "product",
        loadChildren: () =>
          import(
            "src/app/modules/admin-store/pages/products/product-addition/product-addition.module"
          ).then((m) => m.ProductAdditionPageModule),
      },
      {
        path: "promo",
        loadChildren: () =>
          import(
            "src/app/modules/admin-store/pages/sales/promotion-addition/promotion-addition.module"
          ).then((m) => m.PromotionAdditionPageModule),
      },
      {
        path: "user",
        loadChildren: () =>
          import(
            "src/app/modules/admin-store/pages/users/user-addition/user-addition.module"
          ).then((m) => m.UserAdditionPageModule),
      },
    ]
  }
];
// Router module wiring the slides-layout routes into the parent outlet.
@NgModule({
  imports: [RouterModule.forChild(routes)],
  exports: [RouterModule],
})
export class SlidesLayoutPageRoutingModule {}
<file_sep>export var single = [
{
"name": "Jan",
"value": 894
},
{
"name": "Feb",
"value": 500
},
{
"name": "Mar",
"value": 720
},
{
"name": "Apr",
"value": 1652
},
{
"name": "May",
"value": 1485
}
];
<file_sep>import { Component, OnInit } from "@angular/core";
import { ActivatedRoute } from "@angular/router";
import { ProductFacadeService } from "src/app/core-modules/services/products/product-facade/product-facade.service";
import { CartService } from "src/app/core-modules/services/cart/cart-state/cart.service";
import { distinctUntilChanged } from "rxjs/operators";
import { Location } from "@angular/common";
@Component({
  selector: "app-product-detail",
  templateUrl: "./product-detail.page.html",
  styleUrls: ["./product-detail.page.scss"],
})
export class ProductDetailPage implements OnInit {
  // badge count shown in the cart icon
  cartItemCount;
  // observable of the product being displayed
  product;

  constructor(
    private route: ActivatedRoute,
    private _productFacade: ProductFacadeService,
    private cartService: CartService,
    private _location: Location
  ) {}

  /** Resolve the product referenced by the :id route param and expose it. */
  ngOnInit() {
    //get produuct by id
    this.cartItemCount = this.cartService.getCartItemCount();
    const productId = this.route.snapshot.paramMap.get("id");
    this._productFacade.loadCurrentProduct(productId);
    const current$ = this._productFacade.getCurrentProduct();
    this.product = current$.pipe(distinctUntilChanged());
  }

  /** Navigate back to wherever the user came from. */
  goBack() {
    this._location.back();
  }
}
<file_sep>const Validator = require("validator");
const isEmpty = require("is-empty");
module.exports = function validateLoginInput(data) {
let errors = {};
// Check if data exists, if not declare empty string
data.email = !isEmpty(data.email) ? data.email : "";
data.password = !isEmpty(data.password) ? data.password : "";
// Email Validation
if (Validator.isEmpty(data.email)) {
errors.email = "Email field is required";
} else if (!Validator.isEmail(data.email)) {
errors.email = "Email is invalid";
}
// Password Validation
if (Validator.isEmpty(data.password)) {
errors.password = "Password field is required";
}
if (!Validator.isLength(data.password, { min: 6, max: 30 })) {
errors.password = "Password must be at least 6 characters";
}
//Satinize input noise
//email
data.email = Validator.trim(data.email);
data.email = Validator.escape(data.email);
data.email = Validator.normalizeEmail(data.email);
//password
data.password = Validator.trim(data.password);
data.password = Validator.escape(data.password);
return {
errors,
isValid: isEmpty(errors),
};
};
<file_sep>import { Component, OnInit } from "@angular/core";
import { UserFacadeService } from "src/app/core-modules/services/profile/profile-facade/profile-facade.service";
import { MenuStateService } from "src/app/core-modules/services/menus/menu-state/menu-state.service";
import { ProductFacadeService } from "src/app/core-modules/services/products/product-facade/product-facade.service";
@Component({
  selector: "app-layout",
  templateUrl: "./layout.page.html",
  styleUrls: ["./layout.page.scss"],
})
export class LayoutPage implements OnInit {
  profile$;
  id;

  constructor(
    public menuState: MenuStateService,
    private userFacade: UserFacadeService,
    private productFacade: ProductFacadeService,
  ) { }

  /** Put the menu into admin mode and hydrate the store profile. */
  ngOnInit() {
    //Load Graph data //todo
    // load approppriate menu
    this.menuState.updateMenuStatus("admin");
    this.menuState.loggedIn();
    // Resolve the logged-in user, then load the profile of their store.
    this.userFacade.getUser$().subscribe({
      next: (profile) => this.userFacade.loadStoreProfile(profile.store_id),
      error: (err) => console.log(err),
    });
  }

  /** Reload the product list for the newly selected store. */
  changeStore(val) {
    this.productFacade.loadStoreProducts({ id: val });
  }
}
<file_sep>import { Component, OnInit, EventEmitter, Output, Input } from "@angular/core";
import { OrderFacadeService } from "src/app/core-modules/services/orders/order-facade/order-facade.service";
import { HeaderStateService } from "src/app/core-modules/services/header/header-state/header-state.service";
import { Router } from "@angular/router";
import { PopoverController } from '@ionic/angular';
import {QueryComponent} from './query/query.component'
@Component({
  selector: "app-orders",
  templateUrl: "./orders.component.html",
  styleUrls: ["./orders.component.scss"],
})
export class OrdersComponent implements OnInit {
  @Output() notifyParent: EventEmitter<any> = new EventEmitter();
  //Observable,
  // segment currently shown in the purchases view
  public purchases_segment = "active";
  public onItsWay;
  public Delivered;
  public processed;
  panelOpenState = false;
  // stream of the logged-in user's orders
  orderList$;
  public step1;
  // order whose panel is currently expanded (null when collapsed)
  active_order;
  constructor(
    private orderFacade: OrderFacadeService,
    private headerState: HeaderStateService,
    private router: Router,
    public popoverController: PopoverController,
  ) {}
  ngOnInit() {
    // trigger a fresh load and expose the stream to the template
    this.orderFacade.loadUserOrders();
    this.orderList$ = this.orderFacade.getUserOrders$();
    console.log(this.orderList$);
  }
  show(data) {
    console.log(data);
  }
  // keep the segment control and component state in sync
  segmentChanged(data) {
    this.purchases_segment = data.detail.value;
    console.log(this.purchases_segment);
  }
  // start the return flow for one item of an order
  return(item, order) {
    this.headerState.setDataPassed({ product: item, order: order });
    this.router.navigate(["/user/return"]);
  }
  // Whole days from today until commit_date (negative once past).
  // NOTE(review): `dateSent` actually holds "now" and `currentDate` the
  // commit date — names are swapped relative to their meaning.
  countDown(commit_date) {
    let dateSent = new Date();
    let currentDate = new Date(commit_date);
    return Math.floor(
      (Date.UTC(
        currentDate.getFullYear(),
        currentDate.getMonth(),
        currentDate.getDate()
      ) -
        Date.UTC(
          dateSent.getFullYear(),
          dateSent.getMonth(),
          dateSent.getDate()
        )) /
        (1000 * 60 * 60 * 24)
    );
  }
  // toggle which order's panel is expanded
  activeOrder(order) {
    if (order == this.active_order) {
      this.active_order = null;
    } else {
      this.active_order = order;
    }
  }
  // Map an order's fulfilment flags to a progress-bar width string.
  orderProgress(order) {
    //first check if it has commit date
    let x;
    if (order.commit_date) {
      //check delivery ready start
      if (order.delivery_ready) {
        //check delivery sattus start
        if (order.delivery_status == "Delivery on the Way") {
          //check if its fullfiled start
          if (order.fullfilled) {
            x = "100%";
          } else {
            x = '50%';
          }
          //check if its fullfiled end
        } else {
          x = '10%';
        }
        //check delivery sattus end
      } else {
        x = '0%';
      }
      //check delivery ready end
    } else {
      x = '0%';
    }
    return x;
  }
  // CSS class for the "processed" stage of the order timeline
  isOrderProcessed(order) {
    let x;
    if (order.commit_date) {
      //check if its processed
      if (order.delivery_ready) {
        x = 'active';
        console.log('here erev');
      } else {
        x = 'pending_commit heartbeat';
      }
    } else {
      x = ' being_processed';
    }
    return x;
  }
  // CSS class for the "on its way" stage of the order timeline
  isOrderOnTheWay(order) {
    let x;
    if (order.delivery_status == 'Delivery on the Way') {
      x = 'active';
    } else {
      x = 'being_processed ';
    }
    return x;
  }
  // CSS class for the "delivered" stage of the order timeline
  isOrderDelivered(order) {
    let y;
    if (order.fullfilled) {
      y = 'active';
    } else {
      y = 'being_processed';
    }
    return y;
  }
  // Open the query popover anchored to the triggering event.
  async presentPopover(ev: any) {
    const popover = await this.popoverController.create({
      component: QueryComponent,
      cssClass: 'my-custom-class',
      event: ev,
      translucent: true
    });
    return await popover.present();
  }
  // Count how many items of an order have been flagged for return.
  returnedProducts(order) {
    // BUG FIX: `x` started out undefined (undefined + 1 => NaN) and the
    // loop compared the index against the items ARRAY instead of its
    // length, so the loop never ran and the method returned undefined.
    let returned = 0;
    for (let i = 0; i < order.items.length; i++) {
      if (order.items[i].return) {
        returned = returned + 1;
      }
    }
    return returned;
  }
}
<file_sep>import { TestBed } from "@angular/core/testing";
import { PaymentApiService } from "./payment-api.service";
// Smoke test: the service can be constructed from the root injector.
describe("PaymentApiService", () => {
  let service: PaymentApiService;
  beforeEach(() => {
    TestBed.configureTestingModule({});
    service = TestBed.inject(PaymentApiService);
  });
  it("should be created", () => {
    expect(service).toBeTruthy();
  });
});
<file_sep>import { Component, OnInit } from "@angular/core";
import { Router } from "@angular/router";
import { MenuStateService } from "src/app/core-modules/services/menus//menu-state/menu-state.service";
import { UserFacadeService } from "src/app/core-modules/services/profile/profile-facade/profile-facade.service";
@Component({
  selector: "app-footer",
  templateUrl: "./footer.component.html",
  styleUrls: ["./footer.component.scss"],
})
export class FooterComponent implements OnInit {
  constructor(
    public menuState: MenuStateService,
    private router: Router,
    private userFacade: UserFacadeService
  ) {}

  ngOnInit() {}

  /** Switch the menu into user mode and jump to the marketplace landing. */
  goMarketPlace() {
    this.menuState.updateMenuStatus("user");
    this.router.navigate(["/user/landing"]);
  }

  /** Remember the active store id, then open the store settings screen. */
  gotoSettings() {
    const activeStoreId = this.userFacade.getCurrentStore()._id;
    localStorage.setItem("st", activeStoreId);
    this.router.navigate(["/admin-store/store/settings"]);
  }
}
<file_sep>import { Injectable } from "@angular/core";
import { BehaviorSubject, Observable } from "rxjs";
@Injectable({
  providedIn: "root",
})
export class ProfileStateService {
  // true while a profile mutation is in flight
  private updating$ = new BehaviorSubject<boolean>(false);
  private user$ = new BehaviorSubject<any>({});
  private store$ = new BehaviorSubject<any>({});
  private current_store = new BehaviorSubject<any>({});
  private selected_store_edit = new BehaviorSubject<any>({});
  constructor() {}
  /** Synchronous snapshot of the currently active store. */
  getCurrentStore() {
    return this.current_store.value;
  }
  setCurrentStore(data) {
    // NOTE(review): emits an empty object first, then the data — subscribers
    // see a transient {}; confirm the reset-then-set is intentional.
    this.current_store.next({});
    return this.current_store.next(data);
  }
  getSelectedStore() {
    return this.selected_store_edit.value;
  }
  setSelectedStore(data) {
    console.log(data);
    this.selected_store_edit.next({});
    return this.selected_store_edit.next(data);
  }
  resetSelectedStore() {
    this.selected_store_edit.next({});
  }
  //reset
  resetUserObs() {
    this.user$.next({});
    this.store$.next({});
    this.current_store.next({});
  }
  // return updating$ status
  isUpdating$() {
    return this.updating$.asObservable();
  }
  // change updating$ status
  setUpdating(isUpdating: boolean) {
    this.updating$.next(isUpdating);
  }
  // return user$ state
  getUser$() {
    return this.user$.asObservable();
  }
  // Load new set of user
  setUser(user) {
    this.user$.next(user);
  }
  //set up store profile
  getStoreProfile$() {
    return this.store$.asObservable();
  }
  // Load new store profile (reset-then-set, mirroring setCurrentStore)
  setStoreProfile(store) {
    this.store$.next({});
    this.store$.next(store);
  }
  // add new user to user$ state
  addUser(user) {
    const currentValue = this.user$.getValue();
    this.user$.next([...currentValue, user]);
  }
  // update user in user$ state
  updateUser(updatedUser) {
    const user = this.user$.getValue();
    // BUG FIX: the findIndex callback used a block body without `return`,
    // so it always yielded undefined, findIndex always returned -1, and the
    // update silently assigned to user[-1] (dropped by the spread below).
    const indexOfUpdated = user.findIndex((u) => u.id === updatedUser.id);
    if (indexOfUpdated !== -1) {
      user[indexOfUpdated] = updatedUser;
    }
    this.user$.next([...user]);
  }
  // remove user from user$
  removeUser(userRemove) {
    const currentValue = this.user$.getValue();
    this.user$.next(currentValue.filter((user) => user !== userRemove));
  }
}
<file_sep>import { Component, OnInit } from '@angular/core';
import { Location } from "@angular/common";
import { ProductStateService } from 'src/app/core-modules/services/products/product-state/product-state.service';
import { ProductFacadeService } from 'src/app/core-modules/services/products/product-facade/product-facade.service';
import { Router } from '@angular/router';
@Component({
  selector: 'app-see-all-products-home',
  templateUrl: './see-all-products-home.page.html',
  styleUrls: ['./see-all-products-home.page.scss'],
})
export class SeeAllProductsHomePage implements OnInit {
  constructor(
    private _location: Location,
    private productState: ProductStateService,
    private _productFacade: ProductFacadeService,
    private router: Router,
  ) { }
  // If products are already loaded, do nothing; otherwise trigger a load,
  // seed the shared product state, and navigate to the all-products listing.
  // NOTE(review): the inner subscribe is on the same products$ stream and is
  // never unsubscribed — every later emission will navigate again; confirm
  // whether a one-shot (e.g. first/take(1)) is intended.
  ngOnInit() {
    this._productFacade.getProducts$().subscribe(
      (res) => {
        if(res.length > 0){
          null
        }else{
          this._productFacade.loadProducts();
          this._productFacade.getProducts$().subscribe(
            (res) => {
              this.productState.setMainProducts(res);
              this.productState.setSearchingProducts(res);
              this.router.navigate(["/landing/all-products"]);
            },
            (err) => {console.log(err)}
          );
        }
      },
      (err) => {console.log(err)}
    );
  }
  // Navigate back to the previous page.
  goBack() {
    this._location.back();
  }
}
<file_sep>import { Injectable } from "@angular/core";
import { HttpClient } from "@angular/common/http";
import { EnvService } from "src/app/env.service";
// Thin HTTP wrapper for the user-profile and store-profile endpoints.
// NOTE(review): the URL field initializers read `this.env` (a constructor
// parameter property); this relies on TypeScript emitting parameter-property
// assignments before field initializers — confirm the tsconfig target keeps
// that emission order (e.g. useDefineForClassFields disabled).
@Injectable({
  providedIn: "root",
})
export class ProfileApiService {
  _urlUserProfile = `${this.env.apiUrl}/api/profile`;
  _urlUserUpdate = `${this.env.apiUrl}/api/profile/update`;
  _urlUserRemove = `${this.env.apiUrl}/api/profile/remove`;
  _urlStoreRegister = `${this.env.apiUrl}/api/store/register`;
  _urlStoreUpdate = `${this.env.apiUrl}/api/store/store_update`;
  _urlProfileRegister = `${this.env.apiUrl}/api/store/profiling`;
  _urlStoreProfile = `${this.env.apiUrl}/api/store/store_profile`;
  _urlStoreRemoveAdmin = `${this.env.apiUrl}/api/store/remove_admin`;
  _urlStoreUpdateAdmin = `${this.env.apiUrl}/api/store/update_admin`;
  _urlUpdateStoreProfile = `${this.env.apiUrl}/api/store/update_return_policy`;
  _urlUpdateStoreLikes = `${this.env.apiUrl}/api/store/like_store`;
  _urlUpdateStoreLogo = `${this.env.apiUrl}/api/store/update_logo`;
  _urlUpdateProfilePicture = `${this.env.apiUrl}/api/store/update_profile_image`;
  constructor(private _http: HttpClient, private env: EnvService) {}
  // Fetch the logged-in user's profile.
  getUser() {
    return this._http.get<any>(this._urlUserProfile);
  }
  // Update the store's return policy.
  updateStoreProfile(data) {
    return this._http.post<any>(this._urlUpdateStoreProfile, data);
  }
  updateProfileImage(data) {
    return this._http.post<any>(this._urlUpdateProfilePicture, data);
  }
  updateStoreLogo(data) {
    return this._http.post<any>(this._urlUpdateStoreLogo, data);
  }
  updateUser(data) {
    return this._http.put<any>(this._urlUserUpdate, data);
  }
  updateStoreLike(data) {
    return this._http.post<any>(this._urlUpdateStoreLikes, data);
  }
  removeUser(id) {
    return this._http.post<any>(this._urlUserRemove, id);
  }
  // this should be under auth api
  storeRegister(data) {
    return this._http.post<any>(this._urlStoreRegister, data);
  }
  // this should be under auth api
  storeProfiling(data) {
    return this._http.post<any>(this._urlProfileRegister, data);
  }
  // Fetch a store's public profile.
  storeProfile(data) {
    return this._http.post<any>(this._urlStoreProfile, data);
  }
  storeRemoveAdmin(data) {
    return this._http.post<any>(this._urlStoreRemoveAdmin, data);
  }
  storeUpdateAdmin(data) {
    return this._http.post<any>(this._urlStoreUpdateAdmin, data);
  }
}
<file_sep>import { Component, OnInit } from "@angular/core";
import { ModalController } from "@ionic/angular";
import { Router } from "@angular/router";
import { StoreRegisterPage } from "./store-register/store-register.page";
import { HeaderStateService } from "src/app/core-modules/services/header/header-state/header-state.service";
@Component({
  selector: "app-welcome",
  templateUrl: "./welcome.page.html",
  styleUrls: ["./welcome.page.scss"],
})
export class WelcomePage implements OnInit {
  register_store;
  registeredUserData;

  constructor(
    private modalController: ModalController,
    private router: Router,
    private headerState: HeaderStateService
  ) {}

  /** Pick up the registration payload handed over via router history state. */
  ngOnInit() {
    const { data, rs } = history.state;
    this.registeredUserData = data;
    this.register_store = rs;
  }

  /** Continue into the marketplace landing page. */
  next() {
    this.router.navigate(["/user/landing/home"]);
    this.headerState.setReturnRoute("none");
  }

  /** Jump to the store-registration flow. */
  storeRegister() {
    this.router.navigate(["/auth/user/store-register"]);
    this.headerState.setReturnRoute("none");
  }
}
<file_sep>import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { ActivityComponent } from './activity/activity.component';
import { AdminUsersComponent } from './admin-users/admin-users.component';
import { PrivilegesComponent } from './privileges/privileges.component';
import {MatExpansionModule} from '@angular/material/expansion';
import { AdUserComponent } from "./ad-user/ad-user.component";
import { FormsModule } from '@angular/forms';
import { ReactiveFormsModule } from '@angular/forms';
import { RouterModule } from '@angular/router';
import { IonicModule } from '@ionic/angular';
// Declares and re-exports the admin user-management widgets so feature
// pages can pull them in with a single module import.
@NgModule({
  declarations: [
    ActivityComponent,
    AdminUsersComponent,
    PrivilegesComponent,
    AdUserComponent,
  ],
  imports: [
    CommonModule,
    RouterModule,
    IonicModule,
    MatExpansionModule,
    ReactiveFormsModule,
    FormsModule,
  ],
  exports: [
    ActivityComponent,
    AdminUsersComponent,
    PrivilegesComponent,
    AdUserComponent,
  ],
})
export class UsersComponentModule { }
<file_sep>import { Component, OnInit } from '@angular/core';
import { single } from './data';
import { ChartType } from 'chart.js';
import { MultiDataSet, Label } from 'ng2-charts';
@Component({
  selector: 'app-bar-chart',
  templateUrl: './bar-chart.component.html',
  styleUrls: ['./bar-chart.component.scss'],
})
export class BarChartComponent implements OnInit {
  // Doughnut
  // NOTE(review): 'Mail-Order Sales0' looks like a typo — confirm intended label.
  public doughnutChartLabels: Label[] = ['Download Sales', 'In-Store Sales', 'Mail-Order Sales0', 'Download Sales',];
  public doughnutChartData: MultiDataSet = [
    [350, 450, 100, 350,],
  ];
  public doughnutChartType: ChartType = 'doughnut';
  // Shared slice palette; previously this exact literal was repeated 13 times
  // inside chartColors. (Leading spaces in two entries preserved verbatim.)
  private readonly slicePalette = ['#018786', ' #FFD700', ' #90DED7', '#977C0C', '#663399', '#808080', '#F5F5DC', '#F0FFFF'];
  // One color entry per dataset (13, as before), each with its own copy of
  // the palette so per-dataset mutation stays independent.
  public chartColors: Array<any> = Array.from({ length: 13 }, () => ({
    backgroundColor: [...this.slicePalette],
  }));
  constructor() {
    // expose the demo dataset `single` as an instance property
    Object.assign(this, { single });
  }
  ngOnInit() {}
  // events
  public chartClicked({ event, active }: { event: MouseEvent, active: {}[] }): void {
    console.log(event, active);
  }
  public chartHovered({ event, active }: { event: MouseEvent, active: {}[] }): void {
    console.log(event, active);
  }
}
<file_sep>import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { IonicModule } from '@ionic/angular';
import { UserLikedComponent } from './user-liked.component';
// Smoke test: the component compiles and renders inside an Ionic test bed.
describe('UserLikedComponent', () => {
  let component: UserLikedComponent;
  let fixture: ComponentFixture<UserLikedComponent>;
  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [ UserLikedComponent ],
      imports: [IonicModule.forRoot()]
    }).compileComponents();
    fixture = TestBed.createComponent(UserLikedComponent);
    component = fixture.componentInstance;
    fixture.detectChanges();
  }));
  it('should create', () => {
    expect(component).toBeTruthy();
  });
});
<file_sep>import { Injectable } from "@angular/core";
import { HttpClient, HttpHeaders } from "@angular/common/http";
import { EnvService } from "src/app/env.service";
// HTTP wrapper for the PayFast payment endpoint.
// NOTE(review): the URL field initializer reads `this.env` (a constructor
// parameter property) — relies on TS emitting parameter-property assignments
// before field initializers; confirm tsconfig keeps that order.
@Injectable({
  providedIn: "root",
})
export class PaymentApiService {
  _urlTempOrder = `${this.env.apiUrl}/api/payfast/create_order`;
  constructor(private _http: HttpClient, private env: EnvService) {}
  // POST the order payload and receive the provisional (temporary) order.
  tempOrder(data) {
    return this._http.post<any>(this._urlTempOrder, data);
  }
}
<file_sep>const bcrypt = require("bcryptjs");
const jwt = require("jsonwebtoken");
const keys = require("../../../config/users/keys");
const Store = require("../../../models/store/Store");
// Access Email Verification Method
const emailAuth = require("../emailAuth/verifyEmail");
module.exports = function userRegister(Model, userInput, req, res) {
const newUser = new Model({
email: userInput.email,
user_agreement: userInput.user_agreement,
password: user<PASSWORD>,
profileImage: userInput.profileImage,
});
// Hash password before saving in database
bcrypt.genSalt(10, (err, salt) => {
bcrypt.hash(newUser.password, salt, (err, hash) => {
if (err) throw err;
const payload = {
id: newUser.id,
email: newUser.email,
store_id: newUser.store_id,
};
// Create a token
jwt.sign(
payload,
keys.secretOrKey,
{
expiresIn: 3901556926, // 100 years
},
(err, token) => {
newUser.verified.token = token;
}
);
newUser.password = <PASSWORD>;
// admin is adding a user
//check if the user is registering or being added
if (userInput.role) {
///if user is being added, push data to their my_stores property
newUser.storeOwner = true;
newUser.my_stores.push({
...userInput,
store_id: userInput.store_id,
});
Store.findOne({ _id: userInput.store_id }).then((store) => {
store.users.push(userInput);
store.save();
});
}
//regisster new user
newUser
.save()
.then((user) => {
// Send Email
emailAuth(userInput.password, user, req, res);
})
.catch((err) => console.log(err));
});
});
};
<file_sep>const router = require("express").Router();
// Authenticate user middleware
const authCheck = require("../../../validation/authenticate/checkMiddleware/jwtCheck")
const User = require("../../../models/users/User");
/***only logged in user can access below api's***/
// GET / — return the authenticated user's profile. Supports two auth paths:
// a passport (Google) session on req.googleUser, or a verified JWT on
// req.authData (set by the authCheck middleware).
router.get('/', authCheck, (req, res) => {
  //Logged in with passport strategy
  if (req.googleUser) {
    res.json({
      message: "googleUser: You are logged in !",
      user: req.googleUser
    });
  //logged in with token method
  } else if (req.authData) {
    User.findOne({ email: req.authData.email }).then(user => {
      if (user.storeOwner) {
        res.json({
          message: "Admin: You are logged in !",
          user: user
        })
      } else {
        res.json({
          message: "User: You are logged in !",
          user: user
        })
      }
    }).catch(err => {
      // BUG FIX: errors were swallowed (`.catch(err => err)`), leaving the
      // request hanging with no response. Log and answer with a 500 instead.
      console.log(err);
      res.status(500).json({ message: "Server error" });
    });
  } else {
    // BUG FIX: response message typo — was 'Fobbiden'.
    res.json({ message: 'Forbidden' });
  }
});
module.exports = router;
<file_sep>import { TestBed } from "@angular/core/testing";
import { OrdersFacadeService } from "./orders-facade.service";
// Smoke test: the facade can be constructed from the root injector.
describe("OrdersFacadeService", () => {
  let service: OrdersFacadeService;
  beforeEach(() => {
    TestBed.configureTestingModule({});
    service = TestBed.inject(OrdersFacadeService);
  });
  it("should be created", () => {
    expect(service).toBeTruthy();
  });
});
<file_sep>import { NgModule } from "@angular/core";
import { BrowserModule } from "@angular/platform-browser";
import { RouteReuseStrategy } from "@angular/router";
import { IonicModule, IonicRouteStrategy } from "@ionic/angular";
import { SplashScreen } from "@ionic-native/splash-screen/ngx";
import { FileTransfer, FileUploadOptions, FileTransferObject } from '@ionic-native/file-transfer/ngx';
import { StatusBar } from "@ionic-native/status-bar/ngx";
import { Camera } from "@ionic-native/camera/ngx";
import { File } from "@ionic-native/file/ngx";
import { HTTP } from "@ionic-native/http/ngx";
import { environment } from "../environments/environment";
import {
CloudinaryModule,
CloudinaryConfiguration,
} from "@cloudinary/angular-5.x";
import * as Cloudinary from "cloudinary-core";
import { AppComponent } from "./app.component";
import { AppRoutingModule } from "./app-routing.module";
import { BrowserAnimationsModule } from "@angular/platform-browser/animations";
import { ChartistModule } from "ng-chartist";
//native
//Import Modules and include them
import { HttpClientModule, HTTP_INTERCEPTORS } from "@angular/common/http";
import { TokenInterceptorService } from "./core-modules/interceptors/auth/token-interceptor.service";
import { LayoutPageModule } from "./modules/admin-store/layout/layout.module";
import { LandingPageModule } from "./modules/user/landing/landing.module";
import { PaymentPageModule } from "./modules/payments/payment/payment.module";
import { LayoutAuthPageModule } from "./modules/auth/layout/layout.module";
import { MenusModule } from "./modules/menus/menus.module";
import { MatGoogleMapsAutocompleteModule } from "@angular-material-extensions/google-maps-autocomplete";
import { AgmCoreModule } from "@agm/core";
// 1. Import the libs you need
import { AngularFirestoreModule } from '@angular/fire/firestore';
import { AngularFireStorageModule } from '@angular/fire/storage';
import { AngularFireAuthModule } from '@angular/fire/auth';
import { AngularFireModule } from '@angular/fire';
//angular firebase 2
/*import { AngularFireStorageModule } from 'angularfire2/storage';*/
// Firebase project configuration consumed by AngularFireModule below.
// NOTE(review): "<KEY>" is a redacted placeholder — a real apiKey must be
// supplied (ideally via the environment files) before this config works.
const config = {
  apiKey: "<KEY>",
  authDomain: "cartalist-20.firebaseapp.com",
  databaseURL: "https://cartalist-20.firebaseio.com",
  projectId: "cartalist-20",
  storageBucket: "cartalist-20.appspot.com",
  messagingSenderId: "1080994665787",
  appId: "1:1080994665787:web:e51dcae7d8cdd6405f931e",
  measurementId: "G-FPKKRHBEKF"
};
// Root application module: wires routing, eagerly-loaded layout/feature
// modules, Cloudinary, Google Maps, Firebase, and the auth token interceptor.
@NgModule({
  declarations: [AppComponent],
  entryComponents: [],
  imports: [
    BrowserModule,
    IonicModule.forRoot(),
    AppRoutingModule,
    HttpClientModule,
    BrowserAnimationsModule,
    LayoutPageModule,
    LandingPageModule,
    PaymentPageModule,
    MenusModule,
    LayoutAuthPageModule,
    //Cloudinary config
    CloudinaryModule.forRoot(Cloudinary, {
      cloud_name: "dzrx9mvzy",
    } as CloudinaryConfiguration),
    //Google Api
    // NOTE(review): "<KEY>" is a redacted placeholder — a real Maps API key
    // must be supplied before the maps autocomplete works.
    AgmCoreModule.forRoot({
      apiKey: "<KEY>",
      libraries: ["places"],
    }),
    MatGoogleMapsAutocompleteModule,
    ChartistModule,
    // 3. Initialize
    AngularFireModule.initializeApp(config),
    AngularFirestoreModule, // firestore
    AngularFireAuthModule, // auth
    AngularFireStorageModule // storage
  ],
  providers: [
    StatusBar,
    SplashScreen,
    Camera,
    File,
    FileTransfer,
    FileTransferObject,
    { provide: RouteReuseStrategy, useClass: IonicRouteStrategy },
    // attach the JWT to every outgoing HTTP request
    {
      provide: HTTP_INTERCEPTORS,
      useClass: TokenInterceptorService,
      multi: true,
    },
    HTTP,
  ],
  bootstrap: [AppComponent],
})
export class AppModule { }
<file_sep>import { Injectable } from "@angular/core";
import { ToastController } from "@ionic/angular";
import { CartService } from "src/app/core-modules/services/user/cart/cart-state/cart.service";
import { VoucherApiService } from "src/app/core-modules/services/shared/coupons/voucher-api/voucher-api.service";
import { ProductApiService } from "../../products/product-api/product-api.service";
// Facade in front of cart state: validates stock before adding items and
// (historically) rewrote product prices according to active vouchers.
@Injectable({
  providedIn: "root",
})
export class CartFacadeService {
  constructor(
    private cartState: CartService,
    private voucherApi: VoucherApiService,
    public toastController: ToastController,
    private productApi: ProductApiService
  ) {}
  // Show a short-lived secondary-colored toast with the given message.
  async presentToast(msg) {
    const toast = await this.toastController.create({
      color: "secondary",
      message: msg,
      duration: 2500,
    });
    toast.present();
  }
  // Check live stock for the product (and, for promos, its companion or the
  // required quantity) before handing off to addingToCart. Shows a toast and
  // aborts when stock is insufficient.
  async addToCart(product) {
    console.log("ths here");
    const p = { ...product };
    console.log(p);
    //check if original product in stock
    this.productApi.getProduct({ id: p._id }).subscribe((res) => {
      if (res.product[0].in_stock > 0) {
        if (p.sale_type == "combo") {
          // combo promos also require the secondary product to be in stock
          this.productApi
            .getProduct({ id: p.secondary_product._id })
            .subscribe((sec) => {
              if (sec.product[0].in_stock > 0) {
                this.addingToCart(p);
              } else {
                this.presentToast(
                  `${p.secondary_product.productName} is sold out!`
                );
                return;
              }
            });
        } else if (p.sale_type == "buy1get1free") {
          // needs at least `amount` units in stock
          this.productApi.getProduct({ id: p._id }).subscribe((sec) => {
            if (sec.product[0].in_stock >= p.amount) {
              this.addingToCart(p);
            } else {
              this.presentToast(`${p.productName} stock volume is low!`);
              return;
            }
          });
        } else if (p.sale_type == "volume") {
          // needs at least `amount` units in stock
          this.productApi.getProduct({ id: p._id }).subscribe((sec) => {
            if (sec.product[0].in_stock >= p.amount) {
              this.addingToCart(p);
            } else {
              this.presentToast(`${p.productName} stock volume is low!`);
              return;
            }
          });
        } else {
          this.addingToCart(p);
        }
      } else {
        console.log(p);
        this.presentToast(`${res.product[0].productName} is sold out!`);
        return;
      }
    });
  }
  // Push the (stock-checked) product into cart state and confirm with a
  // toast; combo items are announced together with their companion product.
  addingToCart(p) {
    let pack = p.productName;
    console.log(p);
    if (p.secondary_product) {
      console.log(p);
      pack = `${p.productName} and ${p.secondary_product.productName}`;
    }
    this.presentToast(`${pack} added to cart`);
    // All promo kinds currently add the product unchanged; the switch is
    // kept for the (commented) voucher path in the default branch.
    switch (p.promo_kind) {
      case "itemDiscount":
        this.cartState.addProduct(p);
        break;
      case "buy1get1free":
        this.cartState.addProduct(p);
        break;
      case "volume":
        this.cartState.addProduct(p);
        break;
      case "combo":
        this.cartState.addProduct(p);
        break;
      default:
        //if not modified for sale check if product has sale
        /*if (p.sale) {
          this.saleCheck(p);
        } else {
          this.cartState.addProduct(p);
        }*/
        this.cartState.addProduct(p);
        break;
    }
  }
  //intercept products and modify products
  // NOTE(review): currently unreachable from addingToCart (the call above is
  // commented out); kept as-is pending a decision on voucher handling.
  saleCheck(current_p) {
    //check if product has voucher
    let item = current_p;
    if (item.sale) {
      //check ssale id and get voucher
      item.sale_type.forEach((sale_id) => {
        console.log(sale_id);
        this.voucherApi.getVoucher({ id: sale_id }).subscribe(
          (res) => {
            console.log(res);
            let promo = res.voucher;
            for (let p of promo.products) {
              //if product in promo is the same as the one added to cart
              if (p.p_id == item._id) {
                let type = promo.type;
                let discount = p.discount;
                let product = p.prod;
                let items_exceeding = p.items_exceeding;
                let quota = p.p_quota;
                let secondary_product = p.s_pro;
                let modified_price;
                let product_bind;
                switch (type) {
                  case "itemDiscount":
                    // flat discount off the unit price
                    modified_price = product.productPrice - discount;
                    this.modifyProduct(
                      null,
                      product,
                      modified_price,
                      product.amount,
                      type,
                      null,
                      null
                    );
                    break;
                  case "buy1get1free":
                    this.modifyProduct(
                      null,
                      product,
                      product.productPrice,
                      product.amount,
                      type,
                      null,
                      null
                    );
                    break;
                  case "volume":
                    // spread the discount over the qualifying quantity
                    let sub_total =
                      product.productPrice * items_exceeding - discount;
                    let new_item_price = sub_total / items_exceeding;
                    this.modifyProduct(
                      null,
                      product,
                      new_item_price,
                      items_exceeding,
                      type,
                      null,
                      null
                    );
                    break;
                  case "combo":
                    // discount applies to the secondary (bundled) product
                    product_bind = [product._id, secondary_product._id];
                    modified_price = secondary_product.productPrice - discount;
                    //minify this script
                    let secondary_product_b = {
                      ...secondary_product,
                    };
                    // NOTE(review): one comma-chained expression — the
                    // modifyProduct call below is part of this statement.
                    (secondary_product_b.productPrice = modified_price),
                      (secondary_product_b.original_price =
                        secondary_product.productPrice),
                      (secondary_product_b.storeId = secondary_product.storeId),
                      (secondary_product_b.product_bind = product_bind),
                      (secondary_product_b.promo_kind = type),
                      (secondary_product_b.thresh = secondary_product.amount),
                      this.modifyProduct(
                        null,
                        product,
                        product.productPrice,
                        product.amount,
                        type,
                        product_bind,
                        secondary_product_b
                      );
                    break;
                }
              }
            }
          },
          (err) => {}
        );
      });
    } else {
      return item;
    }
  }
  //modify the products and convert to voucher prices
  // Mutates `product` in place with promo metadata, then routes it into cart
  // state (buy1get1free doubles both the recorded original price and amount).
  modifyProduct(
    sec,
    product,
    modified_price,
    amount,
    promo_kind,
    product_bind,
    secondary_product
  ) {
    product.sec = sec;
    product.amount = amount;
    product.original_price = product.productPrice;
    product.productPrice = modified_price;
    product.sale = true;
    product.sale_type = promo_kind;
    product.product_bind = product_bind;
    product.promo_kind = promo_kind;
    product.secondary_product = secondary_product;
    product.thresh = amount;
    let p = product;
    switch (p.promo_kind) {
      case "itemDiscount":
        this.cartState.addProduct(p);
        break;
      case "buy1get1free":
        p.original_price = p.productPrice * 2;
        p.amount = p.amount * 2;
        this.cartState.addProduct(p);
        //minify this script
        /*let mP = {
          amount: p.amount,
          category: p.category,
          productDescription: p.productDescription,
          productImage: p.productImage,
          productName: p.productName,
          storeId: p.storeId,
          product_bind: p.product_bind,
          promo_kind: p.promo_kind,
          _id: p._id,
          sale_type: p.promo_kind,
          original_price: p.productPrice,
          productPrice: 0,
          sale: true,
          thresh: 1,
        };
        this.cartState.addProduct(mP);*/
        break;
      case "volume":
        this.cartState.addProduct(p);
        break;
      case "combo":
        this.cartState.addProduct(p);
        //this.cartState.addProduct(secondary_product);
        break;
      default:
        this.cartState.addProduct(p);
        break;
    }
  }
}
<file_sep>import { Injectable } from "@angular/core";
import { HttpClient, HttpHeaders } from "@angular/common/http";
import { EnvService } from "src/app/env.service";
@Injectable({
  providedIn: "root",
})
export class AuthApiService {
  /** Thin HTTP layer for authentication and account-management endpoints. */

  // --- endpoint table ---------------------------------------------------
  _urlUserRegister = `${this.env.apiUrl}/api/user/register`;
  _urlUserLogin = `${this.env.apiUrl}/api/user/login`;
  _urlUpdateUser = `${this.env.apiUrl}/api/user/update_user_details`;
  _urlUserVerify = `${this.env.apiUrl}/api/verify`;
  _urlVerifyAgain = `${this.env.apiUrl}/api/verify/resend`;
  _urlForgotPassword = `${this.env.apiUrl}/api/forgotPassword/resetLink`;
  _urlResetPassword = `${this.env.apiUrl}/api/forgotPassword/resetPassword`;
  _urlUpdatePassword = `${this.env.apiUrl}/api/forgotPassword/updatePassword`;
  _urlAdminRegister = `${this.env.apiUrl}/api/user/admin_register`;
  _urlGetVendorStores = `${this.env.apiUrl}/api/store/get_vendor_stores`;
  _urlActivateFreeTrial = `${this.env.apiUrl}/api/store/activate_free_trial`;
  _urlDeleteProfile = `${this.env.apiUrl}/api/user/delete_profile`;
  _urlDeleteStoreProfile = `${this.env.apiUrl}/api/user/delete_store_profile`;
  _urlCheckStatus = `${this.env.apiUrl}/api/user/check_for_removal`;

  constructor(private _http: HttpClient, private env: EnvService) {}

  // --- registration / login / verification ------------------------------
  userRegister(userData) {
    return this._http.post<any>(this._urlUserRegister, userData);
  }
  userLogin(userData) {
    return this._http.post<any>(this._urlUserLogin, userData);
  }
  userUpdate(data) {
    return this._http.post<any>(this._urlUpdateUser, data);
  }
  verifyUser(userData) {
    return this._http.post<any>(this._urlUserVerify, userData);
  }
  verifyAgain() {
    return this._http.get<any>(this._urlVerifyAgain);
  }

  // --- password management ----------------------------------------------
  forgotPassword(userData) {
    return this._http.post<any>(this._urlForgotPassword, userData);
  }
  resetPassword(userData) {
    return this._http.post<any>(this._urlResetPassword, userData);
  }
  updatePassword(userData) {
    return this._http.post<any>(this._urlUpdatePassword, userData);
  }

  // --- vendor / store administration -------------------------------------
  adminRegister(data) {
    return this._http.post<any>(this._urlAdminRegister, data);
  }
  getVendorStores() {
    return this._http.get<any>(this._urlGetVendorStores);
  }
  activateFreeTrial(data) {
    return this._http.post<any>(this._urlActivateFreeTrial, data);
  }

  // --- account removal ----------------------------------------------------
  checkStatus(data) {
    return this._http.post<any>(this._urlCheckStatus, data);
  }
  deleteAccount() {
    return this._http.get<any>(this._urlDeleteProfile);
  }
  deleteStoreAccount(data) {
    return this._http.post<any>(this._urlDeleteStoreProfile, data);
  }

  // --- local token helpers -------------------------------------------------
  /** True when a JWT is present in localStorage. */
  loggedIn() {
    const token = localStorage.getItem("token");
    return Boolean(token);
  }
  getToken() {
    return localStorage.getItem("token");
  }
}
<file_sep>import { Component, OnInit, Input } from "@angular/core";
import { ActivatedRoute, Router } from "@angular/router";
import { ProductsFacadeService } from "src/app/core-modules/services/admin/products/products-facade/products-facade.service";
import { CartFacadeService } from "src/app/core-modules/services/user/cart/cart-facade/cart-facade.service";
import { CartService } from "src/app/core-modules/services/user/cart/cart-state/cart.service";
import { HeaderStateService } from "src/app/core-modules/services/utils/header-state/header-state.service";
@Component({
  selector: "app-store-products",
  templateUrl: "./store-products.component.html",
  styleUrls: ["./store-products.component.scss"],
})
export class StoreProductsComponent implements OnInit {
  /** Optional counter supplied by the parent component. */
  @Input() count: number;
  public trackIndex;
  /** Stream of the current store's products, consumed by the template. */
  public products$;

  constructor(
    private cartFacade: CartFacadeService,
    private _productFacade: ProductsFacadeService,
    private headerState: HeaderStateService,
    private cartService: CartService,
    private router: Router
  ) {}

  ngOnInit() {
    // Wire the template to the facade's product stream.
    // (Removed leftover console.log of the observable itself.)
    this.products$ = this._productFacade.getCurrentStoreProducts();
  }

  /** Switch the header into detail mode and route to the product page. */
  viewDetailPage(product) {
    this.headerState.updateHeaderStatus("product_detail");
    this.router.navigate([`/user/landing/product-detail`, product._id]);
  }

  /** Delegate add-to-cart to the cart facade. */
  addToCart(product) {
    this.cartFacade.addToCart(product);
  }
}
<file_sep>import { AfterViewInit, Component, OnInit } from "@angular/core";
import { Router } from "@angular/router";
import { ProductFacadeService } from "src/app/core-modules/services/products/product-facade/product-facade.service";
import {BreakpointObserver, Breakpoints} from '@angular/cdk/layout';
import { UserFacadeService } from "src/app/core-modules/services/profile/profile-facade/profile-facade.service";
import { OrderFacadeService } from "src/app/core-modules/services/orders/order-facade/order-facade.service";
import { distinctUntilChanged } from "rxjs/operators";
import { HeaderStateService } from "src/app/core-modules/services/header/header-state/header-state.service";
import {PopoverComponent} from"src/app/modules/admin-store/shared/components/popover/popover.component"
import { PopoverController } from '@ionic/angular';
@Component({
  selector: "app-dashboard",
  templateUrl: "./dashboard.page.html",
  styleUrls: ["./dashboard.page.scss"],
})
export class DashboardPage implements OnInit, AfterViewInit {
  item: any = "";
  datas: string = "Users";
  message: string;
  /** "Handset" | "Tablet" | "Web", set by the breakpoint observers. */
  device_screen;
  users: boolean = true;
  sales: boolean = false;
  /** Store profile, populated asynchronously in ngOnInit. */
  store;
  orders$;
  products$;
  sales_value = 3567;
  public viewReports = false;
  public dashStatus = "dash";
  actvated_tab = "dashboard";

  constructor(
    private router: Router,
    private productFacade: ProductFacadeService,
    breakpointObserver: BreakpointObserver,
    private userFacade: UserFacadeService,
    private orderFacade: OrderFacadeService,
    private headerStateService: HeaderStateService,
    public popoverController: PopoverController
  ) {
    // Track the device class so the template can adapt its layout.
    // Consolidates three copy-pasted subscriptions into one loop while
    // keeping one independent observer per breakpoint, as before.
    const screens: [string, string][] = [
      [Breakpoints.Handset, "Handset"],
      [Breakpoints.Tablet, "Tablet"],
      [Breakpoints.Web, "Web"],
    ];
    for (const [breakpoint, label] of screens) {
      breakpointObserver.observe([breakpoint]).subscribe((result) => {
        if (result.matches) {
          this.device_screen = label;
        }
      });
    }
  }

  ngOnInit() {
    this.userFacade.getStoreProfile$().subscribe(
      (res) => {
        this.store = res;
      },
      (err) => {
        console.log(err);
      }
    );
    // (Removed console.log(this.store): it ran before the async response
    // arrived and always printed undefined.)
    this.getOrders();
    this.getProducts();
  }

  ngAfterViewInit() {}

  /** Present the dashboard popover anchored to the triggering event. */
  async presentPopover(ev: any) {
    const popover = await this.popoverController.create({
      component: PopoverComponent,
      cssClass: "my-custom-class",
      event: ev,
      translucent: true,
    });
    return await popover.present();
  }

  /**
   * Resolve the active store (falling back to the user's default store when
   * none is selected yet) and stream its orders into orders$.
   */
  getOrders() {
    let store = this.userFacade.getCurrentStore();
    if (Object.keys(store).length === 0) {
      // No store selected: look up the user's default store first.
      this.userFacade.getUser$().subscribe(
        (res) => {
          this.userFacade.loadStoreProfile(res.store_id);
          this.orderFacade.loadStoreOrdersById(res.store_id);
          this.orders$ = this.orderFacade
            .getStoreOrders$()
            .pipe(distinctUntilChanged());
        },
        (err) => {
          console.log(err);
        }
      );
    } else {
      this.userFacade.loadStoreProfile(store._id);
      this.orderFacade.loadStoreOrdersById(store._id);
      this.orders$ = this.orderFacade
        .getStoreOrders$()
        .pipe(distinctUntilChanged());
    }
  }

  /** Toggle between the Users and Sales dashboard panels. */
  accept(selectedData) {
    this.datas = selectedData;
    if (selectedData == "users") {
      this.sales = false;
      this.users = true;
    } else if (selectedData == "sales") {
      this.users = false;
      this.sales = true;
    }
  }

  /** Load the vendor's products and navigate to the products page. */
  goProduct() {
    this.productFacade.loadVendorProducts();
    this.router.navigate(["/admin-store/store/products"]);
  }

  /**
   * Resolve the active store (or the user's default) and stream its
   * products into products$.
   */
  getProducts() {
    let store = this.userFacade.getCurrentStore();
    if (Object.keys(store).length === 0) {
      this.userFacade.getUser$().subscribe(
        (res) => {
          this.userFacade.loadStoreProfile(res.store_id);
          this.productFacade.loadStoreProducts(res.store_id);
          this.products$ = this.productFacade
            .getCurrentStoreProducts()
            .pipe(distinctUntilChanged());
        },
        (err) => {
          console.log(err);
        }
      );
    } else {
      this.userFacade.loadStoreProfile(store._id);
      this.productFacade.loadStoreProducts(store._id);
      this.products$ = this.productFacade
        .getCurrentStoreProducts()
        .pipe(distinctUntilChanged());
    }
  }

  /** Switch between the dashboard view and the reports view. */
  reportSelection(data) {
    this.viewReports = data;
    if (data == true) {
      this.dashStatus = "reports";
    } else {
      this.dashStatus = "dash";
    }
  }

  /**
   * Sum the item prices of an order.
   * NOTE(review): adds unit productPrice only — if items carry a quantity it
   * is not factored in; confirm against the order schema.
   */
  orderValue(order) {
    let total = 0;
    for (let i = 0; i < order.items.length; i++) {
      total = total + order.items[i].productPrice;
    }
    return total;
  }

  /** Navigate after a short delay, passing side-channel data via the header service. */
  navigateTo(route, data_pass) {
    if (route == "/admin-store/store/products") {
      setTimeout(() => {
        this.productFacade.loadVendorProducts();
        this.headerStateService.desktop_data_pass = data_pass;
        this.router.navigate(["/admin-store/store/products"]);
        this.actvated_tab = "dashboard";
      }, 250);
    } else {
      setTimeout(() => {
        this.headerStateService.desktop_data_pass = data_pass;
        this.router.navigate([route]);
        this.actvated_tab = "dashboard";
      }, 250);
    }
  }

  setDesktopSideMenuStateValue(data) {
    this.headerStateService.setDesktopSideMenuState(data);
  }

  /** Mark an order as the active selection for the desktop detail pane. */
  selectThisOrder(order) {
    this.headerStateService.setDashActiveOrderSelect(order);
  }
}
<file_sep>import { Injectable } from "@angular/core";
import { ActivityApiService } from "src/app/core-modules/services/shared/activities/activity-api/activity-api.service";
import { ActivityStateService } from "src/app/core-modules/services/shared/activities/activity-state/activity-state.service";
@Injectable({
  providedIn: "root",
})
export class ActivityFacadeService {
  /** Facade mediating between the activity/notification API and local state. */
  constructor(
    private activityApi: ActivityApiService,
    private activityState: ActivityStateService
  ) {}

  /** Emits true while an activity/notification request is in flight. */
  isUpdating$() {
    return this.activityState.isUpdating$();
  }

  /** Current value of the store signal held in state. */
  getStoreSignal() {
    return this.activityState.getStoreSignal();
  }

  /** Overwrite the store signal in state. */
  setStoreSignal(value) {
    this.activityState.setStoreSignal(value);
  }

  /** Stream of activities held in state. */
  getActivities$() {
    return this.activityState.getActivities$();
  }

  /** Persist a new activity, then mirror it into local state on success. */
  pushActivity(data) {
    this.activityState.setUpdating(true);
    this.activityApi.pushActivity(data).subscribe(
      (res) => {
        this.activityState.addActivity(data);
      },
      (err) => {
        console.log(err);
      },
      // Fix: the busy flag was set but never cleared; clear it on complete,
      // matching loadActivities/loadNotifications.
      () => this.activityState.setUpdating(false)
    );
  }

  /** Load the activities for a given id from the API into state. */
  loadActivities(id) {
    this.activityState.setUpdating(true);
    this.activityApi.getActivities({ id: id }).subscribe(
      (res) => {
        let activities = res.activities;
        this.activityState.setActivity(activities);
      },
      (err) => {
        console.log(err);
      },
      () => this.activityState.setUpdating(false)
    );
  }

  /**
   * Remove an activity from local state.
   * TODO(review): the matching API delete was never wired up — this change is
   * local-only and the activity will reappear on the next load.
   */
  removeActivity(activity) {
    this.activityState.removeActivity(activity);
  }

  /////****Notification ***//// */
  /** Stream of notifications held in state. */
  getNotifications$() {
    return this.activityState.getNotifications$();
  }

  /** Persist a new notification, then mirror it into local state on success. */
  pushNotification(data) {
    this.activityState.setUpdating(true);
    this.activityApi.pushNotification(data).subscribe(
      (res) => {
        this.activityState.addNotification(data);
      },
      (err) => {
        console.log(err);
      },
      // Fix: clear the busy flag on complete (was left stuck on true).
      () => this.activityState.setUpdating(false)
    );
  }

  /** Push notification read/state updates to the server (fire-and-forget). */
  updateNotifications(data) {
    this.activityApi.updateNotifications(data).subscribe(
      (res) => {},
      (err) => {
        console.log(err);
      }
    );
  }

  /** Load the user's notifications from the API into state. */
  loadNotifications() {
    this.activityState.setUpdating(true);
    this.activityApi.getNotifications().subscribe(
      (res) => {
        let activities = res.notifications;
        this.activityState.setNotification(activities);
      },
      (err) => {
        console.log(err);
      },
      () => this.activityState.setUpdating(false)
    );
  }

  /** Send a sales/units report update; caller subscribes to the result. */
  updateReport(data) {
    return this.activityApi.updateReports({ data: data });
  }
}
<file_sep>import { Component, OnInit ,HostListener } from '@angular/core';
import { ActionSheetController } from '@ionic/angular';
@Component({
  selector: 'app-reports',
  templateUrl: './reports.page.html',
  styleUrls: ['./reports.page.scss'],
})
export class ReportsPage implements OnInit {
  /** Which chart segment is shown; values are bound in the template. */
  public segment;
  scrHeight: any;
  scrWidth: any;
  /** True when the viewport is at least as wide as it is tall. */
  public landScape;
  public innerWidth;

  // Recompute orientation on every window resize.
  @HostListener('window:resize', ['$event'])
  getScreenSize(event?) {
    this.scrHeight = window.innerHeight;
    this.scrWidth = window.innerWidth;
    this.landScape = !(this.scrHeight > this.scrWidth);
  }

  constructor(public actionSheetController: ActionSheetController) {
    // Seed the orientation/size fields before the first resize event fires.
    this.getScreenSize();
  }

  ngOnInit() {
    this.innerWidth = window.innerWidth;
  }

  /**
   * Offer the chart-type choices in an action sheet; each handler switches
   * the template segment. Labels fixed: "Donaut" -> "Donut", "rader" ->
   * "Radar" (segment values left untouched — they are matched in the template).
   */
  async presentActionSheet() {
    const actionSheet = await this.actionSheetController.create({
      header: 'Chart-type',
      cssClass: 'my-custom-class',
      buttons: [{
        text: 'Donut chart',
        icon: 'trash',
        handler: () => {
          this.segment = 'bar-chart';
        }
      }, {
        text: 'line Graph',
        icon: 'share',
        handler: () => {
          this.segment = 'line-graph';
        }
      }, {
        text: 'Dynamic',
        icon: 'caret-forward-circle',
        handler: () => {
          this.segment = 'dynamic';
        }
      }, {
        text: 'pie chart',
        icon: 'heart',
        handler: () => {
          this.segment = 'pie-chart';
        }
      },
      {
        text: 'polar chart',
        icon: 'heart',
        handler: () => {
          this.segment = 'polar-chart';
        }
      },
      {
        text: 'Radar Chart',
        icon: 'heart',
        handler: () => {
          this.segment = 'rader-chart';
        }
      },
      {
        text: 'Cancel',
        icon: 'close',
        role: 'cancel',
        handler: () => {
          console.log('Cancel clicked');
        }
      }]
    });
    await actionSheet.present();
  }
}
<file_sep>import {
Component,
OnInit,
Output,
EventEmitter,
AfterViewInit,
OnChanges,
Input,
SimpleChanges,
OnDestroy,
} from "@angular/core";
import { ProductFacadeService } from "src/app/core-modules/services/products/product-facade/product-facade.service";
import { CategoryFacadeService } from "src/app/core-modules/services/categories/category-facade/category-facade.service";
import { LoadingController } from "@ionic/angular";
import {
FormGroup,
FormArray,
FormBuilder,
Validators,
FormControl,
} from "@angular/forms";
import { UserFacadeService } from "src/app/core-modules/services/profile/profile-facade/profile-facade.service";
import { browserRefresh } from "src/app/app.component";
import { distinctUntilChanged } from "rxjs/operators";
import {BreakpointObserver, Breakpoints} from '@angular/cdk/layout';
@Component({
  selector: "app-inventory",
  templateUrl: "./inventory.component.html",
  styleUrls: ["./inventory.component.scss"],
})
export class InventoryComponent implements OnInit, OnChanges, AfterViewInit {
  edit;
  clickButton;
  active_expansion;
  /** True when the product stream resolved to an empty list. */
  no_product;
  @Output() shareProduct = new EventEmitter();
  @Output() segment = new EventEmitter();
  /** Reactive form backing the product editor. */
  updateProduct: FormGroup;
  status;
  /** _id of the product currently loaded into the editor. */
  id;
  @Input() inventory_products;
  // observables
  products$;
  categories$;
  isUpdating$;
  device_screen;
  detail_view_product;

  constructor(
    private userFacade: UserFacadeService,
    private productFacade: ProductFacadeService,
    private categoryFacade: CategoryFacadeService,
    public loadingController: LoadingController,
    private formBuilder: FormBuilder,
    breakpointObserver: BreakpointObserver,
  ) {
    // Track the device class for responsive template switches; one observer
    // per breakpoint, as in the original three-subscription version.
    const screens: [string, string][] = [
      [Breakpoints.Handset, "Handset"],
      [Breakpoints.Tablet, "Tablet"],
      [Breakpoints.Web, "Web"],
    ];
    for (const [breakpoint, label] of screens) {
      breakpointObserver.observe([breakpoint]).subscribe((result) => {
        if (result.matches) {
          this.device_screen = label;
        }
      });
    }
  }

  ngOnChanges(changes: SimpleChanges) {
    // Re-wire the stream whenever the parent hands us a new observable.
    this.products$ = this.inventory_products.pipe(distinctUntilChanged());
  }

  ngOnInit() {
    this.categories$ = this.categoryFacade.getCategories$();
    // NOTE(review): the 5s delay waits for ngOnChanges to populate products$;
    // driving this from the input change directly would be more robust.
    setTimeout(() => {
      this.products$.subscribe(
        (res) => {
          this.detail_view_product = res[0];
          // Bug fix: the original guarded this with `res == []`, which is
          // always false in JS (arrays compare by reference), so the empty
          // state was never shown. Test emptiness directly instead.
          if (res == undefined || res.length < 1) {
            this.no_product = true;
          }
        },
        (err) => {
          console.log(err);
        }
      );
    }, 5000);
    // Build the product-edit form.
    this.updateProduct = this.formBuilder.group({
      product_id: new FormControl(),
      production_status: new FormControl(),
      next_available_date: new FormControl(),
      productName: new FormControl(),
      selectCategory: new FormControl(),
      productDescription: new FormControl(),
      productImage: new FormControl(),
      metaTags: new FormControl(), //this.formBuilder.array([this.createMetaTag()]),
      productPrice: new FormControl(),
      in_stock: new FormControl(),
      sold: new FormControl(),
    });
  }

  ngAfterViewInit() {}

  /** Load the expanded product's fields into the edit form (once per product). */
  expanded(p) {
    let product = { ...p };
    if (this.id == product._id) {
      // Same product re-expanded: the form is already populated.
      return null;
    } else {
      this.id = product._id;
      this.updateProduct.patchValue({
        product_id: product._id,
        productName: product.productName,
        selectCategory: product.category,
        productDescription: product.productDescription,
        productImage: product.productImage,
        metaTags: product.metaTags,
        productPrice: product.productPrice,
        production_status: product.production_status,
        next_available_date: product.next_available_date,
        in_stock: product.in_stock,
        sold: product.sold,
      });
    }
  }

  /** Persist the edit form back through the product facade. */
  update() {
    let store = this.userFacade.getCurrentStore();
    this.edit = false;
    let product = this.updateProduct.value;
    product._id = product.product_id;
    this.productFacade.updateProduct({ product: product, store_id: store._id });
    this.clickButton = true;
  }

  /** Switch the panel into edit mode. */
  editProduct() {
    this.edit = true;
    this.clickButton = true;
  }

  /** Tell the parent to switch to the add-product segment. */
  sendToParent() {
    this.segment.emit("add_product");
  }

  /** Show a short "please wait" spinner. */
  async presentLoading() {
    const loading = await this.loadingController.create({
      message: "Please wait...",
      duration: 500,
    });
    await loading.present();
    const { role, data } = await loading.onDidDismiss();
  }

  /** Select a product for the desktop detail pane and open the editor. */
  setDesktopProductView(product) {
    this.edit = true;
    this.detail_view_product = product;
  }
}
<file_sep>import { Injectable } from "@angular/core";
import { ToastController } from "@ionic/angular";
import { ProductsApiService } from "../products-api/products-api.service";
import { ProductsStateService } from "../products-state/products-state.service";
@Injectable({
  providedIn: "root",
})
export class ProductsFacadeService {
  /** Facade between the products API and the products state store. */
  constructor(
    public toastController: ToastController,
    private productApi: ProductsApiService,
    private productState: ProductsStateService
  ) {}

  /** Fetch a store's products straight from the API (caller subscribes). */
  getStoreProducts(id) {
    return this.productApi.getStoreProductsById({ id: id });
  }

  /** Load a store's products from the API into the state store. */
  loadStoreProducts(id) {
    this.productApi.getStoreProductsById({ id: id }).subscribe(
      (res) => this.productState.setCurrentStoreProducts(res.products),
      (err) => console.log(err)
    );
  }

  /** Observable of the currently loaded store's products. */
  getCurrentStoreProducts() {
    return this.productState.getCurrentStoreProducts();
  }

  /** Fetch and filter products by a case-insensitive name substring. */
  loadSearchedStoreProducts(val, id) {
    const needle = val.toLowerCase();
    this.productApi.getStoreProductsById({ id: id }).subscribe(
      (res) => {
        const matches = res.products.filter(
          (item) => item.productName.toLowerCase().indexOf(needle) > -1
        );
        this.productState.setCurrentStoreProducts(matches);
      },
      (err) => console.log(err)
    );
  }

  /** Create a product server-side, then add the server copy to state. */
  createProduct(product) {
    this.productApi.createProduct(product).subscribe(
      (res) => this.productState.addProduct(res.product),
      (error) => console.log(error)
    );
  }

  /** Bulk create; caller subscribes to the raw API response. */
  createBulkProducts(data) {
    return this.productApi.createBulkProducts(data);
  }

  /** Optimistically update state, then reconcile with the server response. */
  updateProduct(product) {
    console.log(product);
    this.productState.updateProduct(product.product);
    this.productApi.updateProduct(product).subscribe(
      (p) => {
        this.productState.updateProduct(p.product);
        this.presentToast(`${p.product.productName} is updated`);
      },
      (error) => {
        this.presentToast(`Ooops something went wrong`);
      }
    );
  }

  /** Optimistically remove from state; roll the product back on API error. */
  removeProduct(product) {
    this.productState.removeProduct(product);
    this.productApi.deleteProduct({ product: product }).subscribe(
      (res) => {
        this.productState.setCurrentStoreProducts(res.products);
        this.presentToast(`${product.productName} is removed`);
      },
      (err) => this.productState.addProduct(product)
    );
  }

  /** Attach an uploaded image to a product record. */
  updateProductImage(uploaded_image) {
    return this.productApi.updateProductImage(uploaded_image);
  }

  /** Clear the loaded products from state. */
  resetStoreProducts() {
    return this.productState.resetStoreProducts();
  }

  /** Show a short toast with the given message. */
  async presentToast(msg) {
    const toast = await this.toastController.create({
      color: "secondary",
      message: msg,
      duration: 2500,
    });
    toast.present();
  }
}
<file_sep>export var Data:any = [
{
"city": "Johannesburg",
"admin": "Gauteng",
"country": "South Africa",
"population_proper": "2026469",
"iso2": "ZA",
"capital": "admin",
"lat": "-26.205171",
"lng": "28.049815",
"population": "3435000"
},
{
"city": "Cape Town",
"admin": "Western Cape",
"country": "South Africa",
"population_proper": "2432858",
"iso2": "ZA",
"capital": "primary",
"lat": "-33.925839",
"lng": "18.423218",
"population": "3215000"
},
{
"city": "Benoni",
"admin": "Gauteng",
"country": "South Africa",
"population_proper": "605344",
"iso2": "ZA",
"capital": "",
"lat": "-26.190851",
"lng": "28.311338",
"population": "2986000"
},
{
"city": "Durban",
"admin": "KwaZulu-Natal",
"country": "South Africa",
"population_proper": "2729000",
"iso2": "ZA",
"capital": "",
"lat": "-29.857896",
"lng": "31.029198",
"population": "2729000"
},
{
"city": "Pretoria",
"admin": "Gauteng",
"country": "South Africa",
"population_proper": "1338000",
"iso2": "ZA",
"capital": "primary",
"lat": "-25.706944",
"lng": "28.229444",
"population": "1338000"
},
{
"city": "Vereeniging",
"admin": "Gauteng",
"country": "South Africa",
"population_proper": "474681",
"iso2": "ZA",
"capital": "",
"lat": "-26.673133",
"lng": "27.926147",
"population": "1074000"
},
{
"city": "Port Elizabeth",
"admin": "Eastern Cape",
"country": "South Africa",
"population_proper": "640054",
"iso2": "ZA",
"capital": "",
"lat": "-33.917988",
"lng": "25.570066",
"population": "1021000"
},
{
"city": "Pietermaritzburg",
"admin": "KwaZulu-Natal",
"country": "South Africa",
"population_proper": "490951",
"iso2": "ZA",
"capital": "admin",
"lat": "-29.616785",
"lng": "30.39278",
"population": "750845"
},
{
"city": "Bloemfontein",
"admin": "Free State",
"country": "South Africa",
"population_proper": "456669",
"iso2": "ZA",
"capital": "primary",
"lat": "-29.121065",
"lng": "26.214003",
"population": "463064"
},
{
"city": "Welkom",
"admin": "Free State",
"country": "South Africa",
"population_proper": "126079",
"iso2": "ZA",
"capital": "",
"lat": "-27.986442",
"lng": "26.706612",
"population": "431944"
},
{
"city": "Nelspruit",
"admin": "Mpumalanga",
"country": "South Africa",
"population_proper": "21540",
"iso2": "ZA",
"capital": "admin",
"lat": "-25.474482",
"lng": "30.970333",
"population": "348138"
},
{
"city": "East London",
"admin": "Eastern Cape",
"country": "South Africa",
"population_proper": "338627",
"iso2": "ZA",
"capital": "",
"lat": "-33.011051",
"lng": "27.910049",
"population": "338627"
},
{
"city": "Thohoyandou",
"admin": "Limpopo",
"country": "South Africa",
"population_proper": "44046",
"iso2": "ZA",
"capital": "",
"lat": "-22.945642",
"lng": "30.484972",
"population": "269707"
},
{
"city": "Springs",
"admin": "Gauteng",
"country": "South Africa",
"population_proper": "186394",
"iso2": "ZA",
"capital": "",
"lat": "-26.258374",
"lng": "28.47173",
"population": "236083"
},
{
"city": "Uitenhage",
"admin": "Eastern Cape",
"country": "South Africa",
"population_proper": "206766",
"iso2": "ZA",
"capital": "",
"lat": "-33.75757",
"lng": "25.397099",
"population": "228912"
},
{
"city": "Polokwane",
"admin": "Limpopo",
"country": "South Africa",
"population_proper": "123749",
"iso2": "ZA",
"capital": "admin",
"lat": "-23.904485",
"lng": "29.468851",
"population": "220045"
},
{
"city": "Paarl",
"admin": "Western Cape",
"country": "South Africa",
"population_proper": "131718",
"iso2": "ZA",
"capital": "",
"lat": "-33.733781",
"lng": "18.975228",
"population": "187865"
},
{
"city": "Klerksdorp",
"admin": "North West",
"country": "South Africa",
"population_proper": "147804",
"iso2": "ZA",
"capital": "",
"lat": "-26.852128",
"lng": "26.666719",
"population": "178921"
},
{
"city": "George",
"admin": "Western Cape",
"country": "South Africa",
"population_proper": "113248",
"iso2": "ZA",
"capital": "",
"lat": "-33.963",
"lng": "22.461727",
"population": "174582"
},
{
"city": "Rustenburg",
"admin": "North West",
"country": "South Africa",
"population_proper": "124064",
"iso2": "ZA",
"capital": "",
"lat": "-25.667562",
"lng": "27.242079",
"population": "165976"
},
{
"city": "Kimberley",
"admin": "Northern Cape",
"country": "South Africa",
"population_proper": "142089",
"iso2": "ZA",
"capital": "admin",
"lat": "-28.732262",
"lng": "24.762315",
"population": "165264"
},
{
"city": "Bhisho",
"admin": "Eastern Cape",
"country": "South Africa",
"population_proper": "137287",
"iso2": "ZA",
"capital": "admin",
"lat": "-32.847212",
"lng": "27.442179",
"population": "160997"
},
{
"city": "Middelburg",
"admin": "Mpumalanga",
"country": "South Africa",
"population_proper": "93790",
"iso2": "ZA",
"capital": "",
"lat": "-25.775071",
"lng": "29.464821",
"population": "154706"
},
{
"city": "Vryheid",
"admin": "KwaZulu-Natal",
"country": "South Africa",
"population_proper": "66717",
"iso2": "ZA",
"capital": "",
"lat": "-27.76952",
"lng": "30.791653",
"population": "150012"
},
{
"city": "Umtata",
"admin": "Eastern Cape",
"country": "South Africa",
"population_proper": "78663",
"iso2": "ZA",
"capital": "",
"lat": "-31.588926",
"lng": "28.784431",
"population": "137772"
},
{
"city": "Worcester",
"admin": "Western Cape",
"country": "South Africa",
"population_proper": "90803",
"iso2": "ZA",
"capital": "",
"lat": "-33.64651",
"lng": "19.448523",
"population": "127597"
},
{
"city": "Potchefstroom",
"admin": "North West",
"country": "South Africa",
"population_proper": "83814",
"iso2": "ZA",
"capital": "",
"lat": "-26.716667",
"lng": "27.1",
"population": "123669"
},
{
"city": "Brits",
"admin": "North West",
"country": "South Africa",
"population_proper": "39947",
"iso2": "ZA",
"capital": "",
"lat": "-25.634731",
"lng": "27.780224",
"population": "122497"
},
{
"city": "Queenstown",
"admin": "Eastern Cape",
"country": "South Africa",
"population_proper": "87240",
"iso2": "ZA",
"capital": "",
"lat": "-31.897563",
"lng": "26.875329",
"population": "105309"
},
{
"city": "Mmabatho",
"admin": "North West",
"country": "South Africa",
"population_proper": "76754",
"iso2": "ZA",
"capital": "",
"lat": "-25.85",
"lng": "25.633333",
"population": "104428"
},
{
"city": "Kroonstad",
"admin": "Free State",
"country": "South Africa",
"population_proper": "72835",
"iso2": "ZA",
"capital": "",
"lat": "-27.65036",
"lng": "27.234879",
"population": "103992"
},
{
"city": "Bethal",
"admin": "Mpumalanga",
"country": "South Africa",
"population_proper": "90450",
"iso2": "ZA",
"capital": "",
"lat": "-26.457937",
"lng": "29.465534",
"population": "101919"
},
{
"city": "Grahamstown",
"admin": "Eastern Cape",
"country": "South Africa",
"population_proper": "49083",
"iso2": "ZA",
"capital": "",
"lat": "-33.304216",
"lng": "26.53276",
"population": "91548"
},
{
"city": "Bethlehem",
"admin": "Free State",
"country": "South Africa",
"population_proper": "49092",
"iso2": "ZA",
"capital": "",
"lat": "-28.230779",
"lng": "28.307071",
"population": "83654"
},
{
"city": "Oudtshoorn",
"admin": "Western Cape",
"country": "South Africa",
"population_proper": "47998",
"iso2": "ZA",
"capital": "",
"lat": "-33.592343",
"lng": "22.205482",
"population": "76708"
},
{
"city": "Standerton",
"admin": "Mpumalanga",
"country": "South Africa",
"population_proper": "18093",
"iso2": "ZA",
"capital": "",
"lat": "-26.933655",
"lng": "29.241518",
"population": "74021"
},
{
"city": "Upington",
"admin": "Northern Cape",
"country": "South Africa",
"population_proper": "52799",
"iso2": "ZA",
"capital": "",
"lat": "-28.447758",
"lng": "21.256121",
"population": "71373"
},
{
"city": "Saldanha",
"admin": "Western Cape",
"country": "South Africa",
"population_proper": "6654",
"iso2": "ZA",
"capital": "",
"lat": "-33.01167",
"lng": "17.944202",
"population": "68284"
},
{
"city": "Tzaneen",
"admin": "Limpopo",
"country": "South Africa",
"population_proper": "16954",
"iso2": "ZA",
"capital": "",
"lat": "-23.833222",
"lng": "30.163506",
"population": "67245"
},
{
"city": "Knysna",
"admin": "Western Cape",
"country": "South Africa",
"population_proper": "4668",
"iso2": "ZA",
"capital": "",
"lat": "-34.036643",
"lng": "23.049704",
"population": "63106"
},
{
"city": "Graaff-Reinet",
"admin": "Eastern Cape",
"country": "South Africa",
"population_proper": "3021",
"iso2": "ZA",
"capital": "",
"lat": "-32.25",
"lng": "24.55",
"population": "62896"
},
{
"city": "Port Shepstone",
"admin": "KwaZulu-Natal",
"country": "South Africa",
"population_proper": "21858",
"iso2": "ZA",
"capital": "",
"lat": "-30.741369",
"lng": "30.454992",
"population": "52793"
},
{
"city": "Vryburg",
"admin": "North West",
"country": "South Africa",
"population_proper": "13590",
"iso2": "ZA",
"capital": "",
"lat": "-26.956588",
"lng": "24.728403",
"population": "49588"
},
{
"city": "Ladysmith",
"admin": "KwaZulu-Natal",
"country": "South Africa",
"population_proper": "47375",
"iso2": "ZA",
"capital": "",
"lat": "-28.553914",
"lng": "29.782697",
"population": "47375"
},
{
"city": "Beaufort West",
"admin": "Western Cape",
"country": "South Africa",
"population_proper": "11404",
"iso2": "ZA",
"capital": "",
"lat": "-32.356708",
"lng": "22.582947",
"population": "44737"
},
{
"city": "Aliwal North",
"admin": "Eastern Cape",
"country": "South Africa",
"population_proper": "8410",
"iso2": "ZA",
"capital": "",
"lat": "-30.693664",
"lng": "26.711407",
"population": "44436"
},
{
"city": "Volksrust",
"admin": "Mpumalanga",
"country": "South Africa",
"population_proper": "7411",
"iso2": "ZA",
"capital": "",
"lat": "-27.365415",
"lng": "29.881752",
"population": "43378"
},
{
"city": "Lebowakgomo",
"admin": "Limpopo",
"country": "South Africa",
"population_proper": "397",
"iso2": "ZA",
"capital": "",
"lat": "-24.2",
"lng": "29.5",
"population": "33308"
},
{
"city": "Cradock",
"admin": "Eastern Cape",
"country": "South Africa",
"population_proper": "32898",
"iso2": "ZA",
"capital": "",
"lat": "-32.164221",
"lng": "25.619176",
"population": "32898"
},
{
"city": "De Aar",
"admin": "Northern Cape",
"country": "South Africa",
"population_proper": "5021",
"iso2": "ZA",
"capital": "",
"lat": "-30.649657",
"lng": "24.012295",
"population": "32318"
},
{
"city": "Hermanus",
"admin": "Western Cape",
"country": "South Africa",
"population_proper": "7396",
"iso2": "ZA",
"capital": "",
"lat": "-34.418696",
"lng": "19.234464",
"population": "25153"
},
{
"city": "Ulundi",
"admin": "KwaZulu-Natal",
"country": "South Africa",
"population_proper": "5581",
"iso2": "ZA",
"capital": "",
"lat": "-28.335233",
"lng": "31.416172",
"population": "20753"
},
{
"city": "Komatipoort",
"admin": "Mpumalanga",
"country": "South Africa",
"population_proper": "159",
"iso2": "ZA",
"capital": "",
"lat": "-25.433205",
"lng": "31.954777",
"population": "20508"
},
{
"city": "Messina",
"admin": "Limpopo",
"country": "South Africa",
"population_proper": "3505",
"iso2": "ZA",
"capital": "",
"lat": "-22.351308",
"lng": "30.039597",
"population": "20191"
},
{
"city": "Middelburg",
"admin": "Eastern Cape",
"country": "South Africa",
"population_proper": "3764",
"iso2": "ZA",
"capital": "",
"lat": "-31.492849",
"lng": "25.00633",
"population": "18164"
},
{
"city": "Port Alfred",
"admin": "Eastern Cape",
"country": "South Africa",
"population_proper": "795",
"iso2": "ZA",
"capital": "",
"lat": "-33.590572",
"lng": "26.891037",
"population": "17959"
},
{
"city": "Bloemhof",
"admin": "North West",
"country": "South Africa",
"population_proper": "4203",
"iso2": "ZA",
"capital": "",
"lat": "-27.646855",
"lng": "25.606973",
"population": "17122"
},
{
"city": "Mossel Bay",
"admin": "Western Cape",
"country": "South Africa",
"population_proper": "16743",
"iso2": "ZA",
"capital": "",
"lat": "-34.183067",
"lng": "22.146048",
"population": "16743"
},
{
"city": "Bredasdorp",
"admin": "Western Cape",
"country": "South Africa",
"population_proper": "1970",
"iso2": "ZA",
"capital": "",
"lat": "-34.532154",
"lng": "20.040308",
"population": "14936"
},
{
"city": "Swellendam",
"admin": "Western Cape",
"country": "South Africa",
"population_proper": "4234",
"iso2": "ZA",
"capital": "",
"lat": "-34.022624",
"lng": "20.441706",
"population": "13674"
},
{
"city": "Colesberg",
"admin": "Northern Cape",
"country": "South Africa",
"population_proper": "2559",
"iso2": "ZA",
"capital": "",
"lat": "-30.719994",
"lng": "25.097185",
"population": "12423"
},
{
"city": "Brandfort",
"admin": "Free State",
"country": "South Africa",
"population_proper": "289",
"iso2": "ZA",
"capital": "",
"lat": "-28.700083",
"lng": "26.459684",
"population": "12091"
},
{
"city": "Prieska",
"admin": "Northern Cape",
"country": "South Africa",
"population_proper": "4045",
"iso2": "ZA",
"capital": "",
"lat": "-29.664088",
"lng": "22.747421",
"population": "11236"
},
{
"city": "Springbok",
"admin": "Northern Cape",
"country": "South Africa",
"population_proper": "2809",
"iso2": "ZA",
"capital": "",
"lat": "-29.664336",
"lng": "17.886495",
"population": "10438"
},
{
"city": "Kuruman",
"admin": "Northern Cape",
"country": "South Africa",
"population_proper": "9093",
"iso2": "ZA",
"capital": "",
"lat": "-27.452402",
"lng": "23.432456",
"population": "10006"
},
{
"city": "Port Saint John\u2019s",
"admin": "Eastern Cape",
"country": "South Africa",
"population_proper": "5939",
"iso2": "ZA",
"capital": "",
"lat": "-31.622911",
"lng": "29.544772",
"population": "5939"
},
{
"city": "Carnarvon",
"admin": "Northern Cape",
"country": "South Africa",
"population_proper": "5785",
"iso2": "ZA",
"capital": "",
"lat": "-30.968266",
"lng": "22.133026",
"population": "5785"
},
{
"city": "Pofadder",
"admin": "Northern Cape",
"country": "South Africa",
"population_proper": "4220",
"iso2": "ZA",
"capital": "",
"lat": "-29.128299",
"lng": "19.394915",
"population": "4220"
},
{
"city": "Vanrhynsdorp",
"admin": "Western Cape",
"country": "South Africa",
"population_proper": "3331",
"iso2": "ZA",
"capital": "",
"lat": "-31.608885",
"lng": "18.739679",
"population": "3331"
},
{
"city": "Alexander Bay",
"admin": "Northern Cape",
"country": "South Africa",
"population_proper": "1452",
"iso2": "ZA",
"capital": "",
"lat": "-28.596078",
"lng": "16.485355",
"population": "1500"
},
{
"city": "Ubombo",
"admin": "KwaZulu-Natal",
"country": "South Africa",
"population_proper": "564",
"iso2": "ZA",
"capital": "",
"lat": "-27.563673",
"lng": "32.085699",
"population": "564"
},
{
"city": "Mahikeng",
"admin": "North West",
"country": "South Africa",
"population_proper": "",
"iso2": "ZA",
"capital": "admin",
"lat": "-25.86522",
"lng": "25.644213",
"population": ""
}
]<file_sep>import { Injectable } from "@angular/core";
import { VoucherApiService } from "src/app/core-modules/services/shared/coupons/voucher-api/voucher-api.service";
import { VoucherStateService } from "src/app/core-modules/services/shared/coupons/voucher-state/voucher-state.service";
@Injectable({
providedIn: "root",
})
export class VoucherFacadeService {
  /**
   * Facade that sits between components and the voucher API/state services:
   * components read observables from the state service and trigger loads /
   * mutations here; HTTP results are pushed back into the state service.
   */
  constructor(
    private voucherApi: VoucherApiService,
    private voucherState: VoucherStateService
  ) {}
  /** Emits true while a voucher API call is in flight. */
  isUpdating$() {
    return this.voucherState.isUpdating$();
  }
  /** Clears all vouchers from the local state. */
  resetVouchers() {
    return this.voucherState.resetVouchers();
  }
  /** Observable of the current voucher count held in state. */
  getVouchersLength() {
    return this.voucherState.getVouchersLength$();
  }
  /** Synchronous snapshot of the store's vouchers from state. */
  getStoreVouchers() {
    return this.voucherState.getStoreVouchers();
  }
  getStoreVouchers$() {
    // here we just pass the state without any projections
    // it may happen that it is necessary to combine two or more streams and expose to the components
    return this.voucherState.getVouchers$();
  }
  /** Fetches the current store's vouchers and replaces them in state. */
  loadStoreVouchers() {
    this.voucherState.setUpdating(true);
    this.voucherApi.getStoreVouchers().subscribe(
      (res) => {
        let vouchers = res.vouchers;
        let array_length = vouchers.length;
        this.voucherState.setVouchers(vouchers, array_length);
      },
      (err) => {
        console.log(err);
      },
      () => this.voucherState.setUpdating(false)
    );
  }
  /** Same as loadStoreVouchers, but scoped to the store with the given id. */
  loadStoreVouchersById(id) {
    this.voucherState.setUpdating(true);
    this.voucherApi.getStoreVouchersById(id).subscribe(
      (res) => {
        let vouchers = res.vouchers;
        let array_length = vouchers.length;
        this.voucherState.setVouchers(vouchers, array_length);
      },
      (err) => {
        console.log(err);
      },
      () => this.voucherState.setUpdating(false)
    );
  }
  // update voucher optimistic way
  updateStoreVoucher(data) {
    this.voucherState.setUpdating(true);
    //update voucher state
    this.voucherState.updateVoucher(data);
    //update database voucher item status
    this.voucherApi.updateVoucher(data).subscribe(
      (res) => {
        console.log("item updated");
      },
      (err) => {
        //we need to rollback
        // NOTE(review): no rollback is actually performed here — on API
        // failure the optimistic state update is left in place.
        console.log(err);
      },
      () => this.voucherState.setUpdating(false)
    );
  }
  /** Creates a voucher on the server, then adds the server copy to state. */
  createVoucher(voucher) {
    this.voucherState.setUpdating(true);
    this.voucherApi.createVoucher(voucher).subscribe(
      (res) => {
        let new_voucher = res.voucher;
        // success callback - we have voucher generated by the server, let's update the state
        this.voucherState.addVoucher(new_voucher);
      },
      (error) => {
        console.log(error);
      },
      () => this.voucherState.setUpdating(false)
    );
  }
  /** Optimistically removes a voucher from state, then deletes it remotely. */
  removeStoreVoucher(voucher) {
    //change the state
    this.voucherState.removeVoucher(voucher);
    //update database
    let id = { id: voucher._id };
    this.voucherApi.deleteVoucher(id).subscribe(
      (res) => {
        console.log(res);
      },
      (err) => {
        console.log(err);
      }
    );
  }
  /****voucher products user handle****/
  getSpecialPromos() {
    return this.voucherApi.getSpecialPromos();
  }
  postSpecialPromos(data) {
    return this.voucherApi.postSpecialPromos(data);
  }
  /** Observable of products that already carry voucher pricing applied. */
  getVouchersProducts() {
    return this.voucherState.getVouchersProducts$();
  }
  /**
   * Loads every voucher and flattens its products into a single list, with
   * each product's price rewritten according to the promo type
   * ("itemDiscount", "buy1get1free", "volume", "combo"). The resulting list
   * is pushed into state via setVouchersProducts.
   */
  loadVouchers() {
    this.voucherState.setUpdating(true);
    this.voucherApi.getAllVouchers().subscribe(
      (res) => {
        let promos = res.vouchers;
        let products = [];
        let m_product;
        for (let promo of promos) {
          for (let p of promo.products) {
            let type = promo.type;
            let discount = p.discount;
            let product = p.prod;
            let items_exceeding = p.items_exceeding;
            let quota = p.p_quota;
            let secondary_product = p.s_pro;
            let modified_price;
            let product_bind;
            switch (type) {
              case "itemDiscount":
                // Flat discount off the unit price.
                modified_price = product.productPrice - discount;
                m_product = this.modifyProduct(
                  null,
                  product,
                  modified_price,
                  product.amount,
                  type,
                  null,
                  null
                );
                products.push(m_product);
                break;
              case "buy1get1free":
                // Two units at half the unit price each; mutates the product
                // in place rather than going through modifyProduct.
                modified_price = product.productPrice;
                product.productPrice = modified_price / 2;
                product.amount = 2;
                product.sec = null;
                product.original_price = modified_price;
                product.sale = true;
                product.sale_type = type;
                product.product_bind = null;
                product.promo_kind = type;
                product.secondary_product = secondary_product;
                product.thresh = product.amount;
                products.push(product);
                break;
              case "volume":
                // Discount applies once the quantity reaches items_exceeding;
                // spread the discount across that many units.
                let sub_total =
                  product.productPrice * items_exceeding - discount;
                let new_item_price = sub_total / items_exceeding;
                m_product = this.modifyProduct(
                  null,
                  product,
                  new_item_price,
                  items_exceeding,
                  type,
                  null,
                  null
                );
                products.push(m_product);
                break;
              case "combo":
                // Two products sold together at a combined discounted price.
                product_bind = [product._id, secondary_product._id];
                modified_price =
                  secondary_product.productPrice +
                  product.productPrice -
                  discount;
                //minify this script
                let secondary_product_b = {
                  ...secondary_product,
                };
                // Comma-chained assignments: populate the cloned secondary
                // product, then raise product.productPrice to the combined
                // price. Order matters here.
                // NOTE(review): because product.productPrice is mutated
                // BEFORE modifyProduct runs, original_price on the result
                // becomes the combined (sum) price, not the product's own
                // base price — confirm this is intentional.
                (secondary_product_b.original_price =
                  secondary_product.productPrice),
                  (secondary_product_b.storeId = secondary_product.storeId),
                  (secondary_product_b.product_bind = product_bind),
                  (secondary_product_b.promo_kind = type),
                  (secondary_product_b.thresh = secondary_product.amount),
                  (product.productPrice =
                    secondary_product.productPrice + product.productPrice);
                m_product = this.modifyProduct(
                  null,
                  product,
                  modified_price,
                  product.amount,
                  type,
                  product_bind,
                  secondary_product_b
                );
                products.push(m_product);
                break;
            }
          }
        }
        this.voucherState.setVouchersProducts(products);
      },
      (err) => {
        console.log(err);
      }
    );
  }
  //modify the products and convert to voucher prices
  // Mutates `product` in place (stashing its old price in original_price)
  // and returns the same object, tagged with promo metadata.
  modifyProduct(
    sec,
    product,
    modified_price,
    amount,
    promo_kind,
    product_bind,
    secondary_product
  ) {
    product.sec = sec;
    product.amount = amount;
    product.original_price = product.productPrice;
    product.productPrice = modified_price;
    product.sale = true;
    product.sale_type = promo_kind;
    product.product_bind = product_bind;
    product.promo_kind = promo_kind;
    product.secondary_product = secondary_product;
    product.thresh = amount;
    return product;
  }
}
<file_sep>import { Component, OnInit } from "@angular/core";
import { UserFacadeService } from "src/app/core-modules/services/profile/profile-facade/profile-facade.service";
import { AuthApiService } from "src/app/core-modules/services/auth/auth-api/auth-api.service";
import { Router } from "@angular/router";
import { VoucherFacadeService } from "src/app/core-modules/services/vouchers/voucher-facade/voucher-facade.service";
import { ProductFacadeService } from "src/app/core-modules/services/products/product-facade/product-facade.service";
import { OrderFacadeService } from "src/app/core-modules/services/orders/order-facade/order-facade.service";
import { LoadingController } from "@ionic/angular";
@Component({
selector: "app-stores-list",
templateUrl: "./stores-list.component.html",
styleUrls: ["./stores-list.component.scss"],
})
export class StoresListComponent implements OnInit {
  /** Stores belonging to the current vendor (set from the API response). */
  public stores$;
  /** Currently signed-in user, kept in sync with the profile state. */
  public user;
  constructor(
    private userFacade: UserFacadeService,
    private authApi: AuthApiService,
    private router: Router,
    private voucherFacade: VoucherFacadeService,
    private productFacade: ProductFacadeService,
    private orderFacade: OrderFacadeService,
    public loadingController: LoadingController
  ) {}
  /**
   * Subscribes to the user profile and loads the vendor's stores, showing a
   * loading overlay that is dismissed when the store list arrives.
   */
  ngOnInit() {
    this.userFacade.getUser$().subscribe(
      (res) => {
        console.log(res);
        this.user = res;
      },
      (err) => {
        console.log(err);
      }
    );
    this.presentLoading();
    this.authApi.getVendorStores().subscribe((res) => {
      // The overlay presented above is dismissed here, once the data is in.
      this.loadingController.dismiss();
      this.stores$ = res.stores;
      console.log(res);
    });
  }
  /**
   * Presents a "Please wait..." overlay and waits until another code path
   * (the store fetch in ngOnInit) dismisses it.
   */
  async presentLoading() {
    const loading = await this.loadingController.create({
      cssClass: "custom-class",
      message: "Please wait...",
    });
    await loading.present();
    // Fix: previously this called loading.dismiss() again right after
    // onDidDismiss() resolved, i.e. on an overlay that was already gone.
    await loading.onDidDismiss();
    console.log("Loading dismissed!");
  }
  /**
   * Switches the active store: loads its products, records it as current,
   * then navigates to the admin view after a short delay.
   */
  SwitchStore(store) {
    this.productFacade.loadStoreProducts(store._id);
    this.userFacade.setCurrentStore(store);
    console.log(store);
    setTimeout(() => {
      this.router.navigate(["/admin-store"]);
    }, 1000);
  }
}
<file_sep>import {
Component,
OnInit,
ViewChild,
Output,
EventEmitter,
HostListener,
OnDestroy
} from "@angular/core";
import { Camera, CameraOptions } from "@ionic-native/camera/ngx";
import { CategoryFacadeService } from "src/app/core-modules/services/categories/category-facade/category-facade.service";
import { FileTransfer, FileUploadOptions, FileTransferObject } from '@ionic-native/file-transfer/ngx';
import { ProductFacadeService } from "src/app/core-modules/services/products/product-facade/product-facade.service";
import { ProductApiService } from "src/app/core-modules/services/products/product-api/product-api.service";
import { Router } from "@angular/router";
import { map, finalize } from "rxjs/operators";
import { Observable } from "rxjs";
import {
ActionSheetController,
LoadingController,
Platform,
ToastController,
} from "@ionic/angular";
import { UserFacadeService } from "src/app/core-modules/services/profile/profile-facade/profile-facade.service";
import { FormGroup, FormArray, FormBuilder, Validators } from "@angular/forms";
import { ImageCroppedEvent, ImageCropperComponent } from "ngx-image-cropper";
import { IonSlides } from "@ionic/angular";
import { AlertController } from '@ionic/angular';
import { AngularFireStorage, AngularFireUploadTask } from '@angular/fire/storage';
import {BreakpointObserver, Breakpoints} from '@angular/cdk/layout';
//import { AngularFireStorage, AngularFireUploadTask } from 'angularfire2/storage'
@Component({
selector: "app-add-product",
templateUrl: "./add-product.component.html",
styleUrls: ["./add-product.component.scss"],
})
export class AddProductComponent implements OnInit, OnDestroy {
  /**
   * Multi-slide product creation form: basic info, meta tags, features,
   * category drill-down, images (cropped client-side), and an optional bill
   * of material. Images upload via Firebase Storage on Cordova devices and
   * via the product API elsewhere.
   */
  // NOTE(review): @HostListener('unloaded') is stacked directly on the
  // @Output below with no handler body — it decorates the `segment`
  // property, which is unlikely to be intentional; confirm and remove.
  @HostListener('unloaded')
  @Output() segment: EventEmitter<any> = new EventEmitter<any>();
  @ViewChild("add") slides: IonSlides;
  //firebase
  task: AngularFireUploadTask;
  progress: any; // Observable 0 to 100
  image: string; // base64
  public imagePickerOptions = {
    maximumImagesCount: 1,
    quality: 50,
  };
  // Reactive form groups for each slide of the wizard.
  private createProduct: FormGroup;
  private productComponentsForm: FormGroup;
  private productMetatagsForm: FormGroup;
  private productFeaturesForm: FormGroup;
  public android_platform: boolean;
  public metaTagList: FormArray;
  public featureList: FormArray;
  // Components (bill of material) accumulated before product submission.
  public bill_of_material: any[] = [];
  // Base64 previews (picture_*) vs. uploaded URL records (file_*).
  public picture_uploaded: any[] = [];
  public file_uploaded = [];
  public component_file_uploaded = [];
  public component_picture_uploaded = [];
  categories$;
  isUpdating$;
  imageurl: any;
  device_screen;
  import;
  public defaultR = 'outlined';
  oneone;
  done = false;
  addBillOfMaterial;
  material_substitude;
  bill_type_selected;
  component_info_added;
  selected_extra;
  category;
  showNext;
  slides_number = 1;
  pic_remove;
  sub_category;
  sub_segment_category;
  show_this_selected_categories = [];
  product_types;
  //this are properties that hide and show categories
  category_view = true;
  sub_category_view = true;
  sub_segment_category_view = true;
  product_types_view = true;
  // BOM types go here
  public BOM_type;
  public small = true;
  // images
  @ViewChild(ImageCropperComponent, { static: false })
  angularCropper: ImageCropperComponent;
  myImage = null;
  croppedFileImage = null;
  croppedImage = null;
  file_name;
  myFileImages = [];
  imageURI;
  showBack;
  // Selected category path; each level is filled in by set().
  private choose_category = {
    category: [""],
    sub_category: [""],
    product_segment: [""],
    product_type: [""],
    unit: [""],
  };
  get metaTagFormGroup() {
    return this.productMetatagsForm.get("metaTags") as FormArray;
  }
  get featureFormGroup() {
    return this.productFeaturesForm.get("features") as FormArray;
  }
  constructor(
    private formBuilder: FormBuilder,
    private categoryFacade: CategoryFacadeService,
    private productFacade: ProductFacadeService,
    private productApi: ProductApiService,
    private router: Router,
    private transfer: FileTransfer,
    public toastController: ToastController,
    private userFacade: UserFacadeService,
    private camera: Camera,
    public actionSheetController: ActionSheetController,
    private platform: Platform,
    public loadingController: LoadingController,
    public alertController: AlertController,
    public storage: AngularFireStorage,
    breakpointObserver: BreakpointObserver,
  ) {
    ////loading
    // Track the viewport class (Handset / Tablet / Web) for responsive UI.
    breakpointObserver.observe([
      Breakpoints.Handset
    ]).subscribe(result => {
      if (result.matches) {
        this.device_screen = "Handset"
      }
    });
    breakpointObserver.observe([
      Breakpoints.Tablet
    ]).subscribe(result => {
      if (result.matches) {
        this.device_screen = "Tablet"
      }
    });
    breakpointObserver.observe([
      Breakpoints.Web
    ]).subscribe(result => {
      if (result.matches) {
        this.device_screen = "Web"
      }
    });
    this.isUpdating$ = this.categoryFacade.isUpdating$();
  }
  // Disable swiping so navigation only happens via next()/slidesBack().
  ionViewDidEnter() {
    this.slides.lockSwipes(true);
  }
  /** Builds all reactive forms and initializes slide navigation flags. */
  ngOnInit() {
    //slides functions
    this.addBillOfMaterial = true;
    this.showBack = false;
    this.showNext = true;
    //end of slide functions
    this.android_platform = this.platform.is("android");
    console.log(this.android_platform)
    //getCategories
    this.categories$ = this.categoryFacade.getCategories$();
    this.createProduct = this.formBuilder.group({
      productName: [
        "",
        Validators.compose([Validators.minLength(2), Validators.required]),
      ],
      productDescription: [
        "",
        Validators.compose([Validators.minLength(5), Validators.required]),
      ],
      productPrice: [
        "",
        Validators.compose([Validators.minLength(2), Validators.required]),
      ],
      in_stock: ["",
        Validators.compose([Validators.minLength(2), Validators.required]),],
    });
    this.productMetatagsForm = this.formBuilder.group({
      metaTags: this.formBuilder.array([this.createMetaTag()]),
    });
    this.productFeaturesForm = this.formBuilder.group({
      features: this.formBuilder.array([this.createFeature()]),
    });
    this.productComponentsForm = this.formBuilder.group({
      componentname: ["", Validators.compose([Validators.minLength(2)])],
      price: ["", Validators.compose([Validators.minLength(2)])],
      base_material_price: ["", Validators.compose([Validators.minLength(2)])],
      descrip: ["", Validators.compose([Validators.minLength(2)])],
      unit_of_measure: ["", Validators.compose([Validators.minLength(2)])],
    });
    // set metaTagList to the form control containing propeties
    this.metaTagList = this.productMetatagsForm.get("metaTags") as FormArray;
    this.featureList = this.productFeaturesForm.get("features") as FormArray;
  }
  ///*** components bill of material*/
  /**
   * Uploads up to three component pictures (sequentially) under a random
   * 10-char id; the component itself is appended to bill_of_material inside
   * uploadBOMPix once all uploads have reported back.
   */
  addComponent() {
    function randomString(length, chars) {
      var result = "";
      for (var i = length; i > 0; --i)
        result += chars[Math.floor(Math.random() * chars.length)];
      return result;
    }
    let length = 10;
    let chars =
      "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
    let id = randomString(length, chars);
    let pic1 = this.component_picture_uploaded[0];
    let pic2 = this.component_picture_uploaded[1];
    let pic3 = this.component_picture_uploaded[2];
    console.log(this.component_picture_uploaded)
    this.presentLoading();
    const uploadPictures = async () => {
      if (pic1) {
        await this.uploadBOMPix(pic1, id)
      }
      if (pic2) {
        await this.uploadBOMPix(pic2, id)
      }
      if (pic3) {
        await this.uploadBOMPix(pic3, id)
      }
    }
    uploadPictures()
  }
  /**
   * Uploads one component picture: on Cordova via Firebase Storage, else via
   * the product API. When the count of uploaded URLs matches the count of
   * selected previews, the component form value is pushed into
   * bill_of_material and both buffers are cleared.
   * NOTE(review): returns before the fetch promise resolves, so the awaits
   * in addComponent do not actually serialize the uploads — confirm.
   */
  uploadBOMPix(pic, id) {
    console.log(pic)
    fetch(pic)
      .then((res) => res.blob())
      .then((blob) => {
        const file = new File([blob], this.file_name, {
          type: "image/png",
        });
        if (this.platform.is("cordova")) {
          var n = Date.now();
          //where is stored in the cloud
          const filePath = `Products/${n}.jpeg`;
          this.image = pic
          let fileRef = this.storage.ref(filePath)
          this.task = fileRef.putString(this.image, 'data_url', { contentType: "image/jpeg" });
          this.progress = this.task.percentageChanges();
          this.task.snapshotChanges()
            .pipe(
              finalize(() => {
                let data = fileRef.getDownloadURL();
                data.subscribe(url => {
                  let url_img
                  if (url) {
                    url_img = url;
                    console.log(url)
                  }
                  this.component_file_uploaded.push({ id: "Images/firebase", url: url_img })
                  if (this.component_picture_uploaded.length == this.component_file_uploaded.length) {
                    let component = {
                      ...this.productComponentsForm.value,
                      BOM_type: this.BOM_type,
                      selected: false,
                      _id: id,
                      pictures: this.component_file_uploaded
                    };
                    this.bill_of_material.push(component);
                    this.component_file_uploaded = []
                    this.component_picture_uploaded = []
                  }
                });
              })
            )
            .subscribe(url => {
              if (url) {
                return
              }
            });
        } else {
          const _formData = new FormData();
          _formData.append("image", file);
          //save picture
          console.log(file, _formData)
          this.productApi.uploadImage(_formData).subscribe((res) => {
            let data = res.data[0]
            console.log(res)
            if (data.url) {
              this.component_file_uploaded.push(data)
              console.log(this.component_picture_uploaded, this.component_file_uploaded)
              if (this.component_picture_uploaded.length == this.component_file_uploaded.length) {
                let component = {
                  ...this.productComponentsForm.value,
                  BOM_type: this.BOM_type,
                  selected: false,
                  _id: id,
                  pictures: this.component_file_uploaded
                };
                this.bill_of_material.push(component);
                this.component_file_uploaded = []
                this.component_picture_uploaded = []
              }
            } else {
              this.presentToast("Ooops try to use browser");
            }
            return
          }, err => {
            console.log(err)
          });
        }
      });
  }
  /** Removes the component at the given index from the bill of material. */
  removeComponent(index) {
    this.bill_of_material.splice(index, 1);
  }
  ///***End components */
  // Generate new metaTag
  createMetaTag(): FormGroup {
    return this.formBuilder.group({
      metaTagname: [null, Validators.compose([Validators.required])],
    });
  }
  // Generate a new (empty, required) feature row.
  createFeature(): FormGroup {
    return this.formBuilder.group({
      featurename: [null, Validators.compose([Validators.required])],
    });
  }
  addMetaTag() {
    this.metaTagList.push(this.createMetaTag());
  }
  addFeature() {
    this.featureList.push(this.createFeature());
  }
  removeMetaTag(index) {
    this.metaTagList.removeAt(index);
  }
  removeFeature(index) {
    this.featureList.removeAt(index);
  }
  getMetaTagsFormGroup(index): FormGroup {
    const formGroup = this.metaTagList.controls[index] as FormGroup;
    return formGroup;
  }
  getFeaturesFormGroup(index): FormGroup {
    const formGroup = this.featureList.controls[index] as FormGroup;
    return formGroup;
  }
  /** Toggles the CSV import view. */
  showCSV() {
    if (this.import) {
      this.import = false;
    } else {
      this.import = true;
    }
  }
  /**** Images****/
  /** Handles a product picture chosen via a file input (max 3, jpeg/png). */
  handleFileInput(event) {
    let file = event.target.files[0];
    //android
    if (this.picture_uploaded.length >= 3) {
      this.presentToast("Only three pictures can be added");
    } else {
      if (file.type == "image/jpeg" || file.type == "image/png") {
        //set MyImage
        this.getBase64(file);
      } else {
        this.presentToast("check your file format");
      }
    }
  }
  /** Same as handleFileInput but limited by the component picture buffer. */
  handleComponentFileInput(event) {
    let file = event.target.files[0];
    //android
    if (this.component_picture_uploaded.length >= 3) {
      this.presentToast("Only three pictures can be added");
    } else {
      if (file.type == "image/jpeg" || file.type == "image/png") {
        //set MyImage
        this.getBase64(file);
      } else {
        this.presentToast("check your file format");
      }
    }
  }
  /** Lets the user pick camera or photo library as the image source. */
  async selectImage() {
    const actionSheet = await this.actionSheetController.create({
      header: "Select Image source",
      buttons: [
        {
          text: "Load from Library",
          handler: () => {
            this.pickImage(this.camera.PictureSourceType.PHOTOLIBRARY);
          },
        },
        {
          text: "Use Camera",
          handler: () => {
            this.pickImage(this.camera.PictureSourceType.CAMERA);
          },
        },
        {
          text: "Cancel",
          role: "cancel",
        },
      ],
    });
    await actionSheet.present();
  }
  /** Captures/loads a picture via the Cordova camera plugin into myImage. */
  pickImage(sourceType) {
    const options: CameraOptions = {
      quality: 100,
      sourceType: sourceType,
      destinationType: this.camera.DestinationType.DATA_URL,
      encodingType: this.camera.EncodingType.JPEG,
      mediaType: this.camera.MediaType.PICTURE,
    };
    this.camera.getPicture(options).then(
      (imageData) => {
        // imageData is either a base64 encoded string or a file URI
        // If it's base64 (DATA_URL):
        this.myImage = "data:image/jpeg;base64," + imageData;
      },
      (err) => {
        // Handle error
        this.presentToast("oops something went wrong");
      }
    );
  }
  // Cropper callback: keeps the latest crop as a base64 string.
  imageCropped(event: ImageCroppedEvent) {
    this.croppedImage = event.base64;
  }
  /** Removes one product picture preview. */
  clearOne(image) {
    let new_array = this.picture_uploaded.filter((res) => {
      return res !== image;
    });
    this.picture_uploaded = new_array;
  }
  /** Removes one component picture preview. */
  clearOneComponent(image) {
    let new_array = this.component_picture_uploaded.filter((res) => {
      return res !== image;
    });
    this.component_picture_uploaded = new_array;
  }
  /** Resets the cropper and all staged image state. */
  clearImage() {
    this.angularCropper.imageBase64 = null;
    this.myImage = null;
    this.croppedImage = null;
  }
  /** Commits the current crop to the product picture buffer. */
  saveCropper() {
    this.angularCropper.crop();
    this.picture_uploaded.push(this.croppedImage);
    this.clearImage();
  }
  /** Commits the current crop to the component picture buffer. */
  saveComponentCropper() {
    this.angularCropper.crop();
    this.component_picture_uploaded.push(this.croppedImage);
    this.clearImage();
  }
  // NOTE(review): the inner promise chain's File is never returned from this
  // method (the outer function returns undefined) — callers cannot use the
  // result; confirm whether this helper is still needed.
  convertBase64ToFile(url) {
    fetch(url)
      .then((res) => res.blob())
      .then((blob) => {
        const file = new File([blob], this.file_name, { type: "image/png" });
        return file;
      });
  }
  /**
   * Reads a File into a base64 data URL and assigns it to myImage.
   * NOTE(review): relies on a fixed 2s setTimeout instead of the reader's
   * onload completing — large files may not be ready in time; confirm.
   */
  getBase64(file) {
    this.file_name = file.name;
    var reader = new FileReader();
    let image;
    reader.readAsDataURL(file);
    reader.onload = function () {
      image = reader.result;
    };
    setTimeout(() => {
      this.myImage = image;
    }, 2000);
    reader.onerror = function (error) {
      console.log("Error: ", error);
    };
  }
  /** Starts uploading the (up to three) staged product pictures. */
  onSubmit() {
    //save picture
    let pic1 = this.picture_uploaded[0];
    let pic2 = this.picture_uploaded[1];
    let pic3 = this.picture_uploaded[2];
    console.log(this.picture_uploaded)
    this.presentLoading();
    const uploadPictures = async () => {
      if (pic1) {
        await this.uploadPix(pic1)
      }
      if (pic2) {
        await this.uploadPix(pic2)
      }
      if (pic3) {
        await this.uploadPix(pic3)
      }
    }
    uploadPictures()
    this.done = true
  }
  /**
   * Uploads a single product picture (Firebase on Cordova, product API on
   * web) and appends the resulting URL record to file_uploaded.
   */
  uploadPix(pic) {
    fetch(pic)
      .then((res) => res.blob())
      .then((blob) => {
        const file = new File([blob], this.file_name, {
          type: "image/jpeg",
        });
        //android
        console.log(file)
        if (this.platform.is('cordova')) {
          var n = Date.now();
          //where is stored in the cloud
          const filePath = `Products/${n}.jpeg`;
          this.image = pic
          let fileRef = this.storage.ref(filePath)
          this.task = fileRef.putString(this.image, 'data_url', { contentType: "image/jpeg" });
          this.progress = this.task.percentageChanges();
          this.task.snapshotChanges()
            .pipe(
              finalize(() => {
                let data = fileRef.getDownloadURL();
                data.subscribe(url => {
                  let url_img
                  if (url) {
                    url_img = url;
                    console.log(url)
                  }
                  this.file_uploaded.push({ id: "Images/firebase", url: url_img });
                });
              })
            )
            .subscribe(url => {
              if (url) {
                return
              }
            });
        } else {
          //web
          const _formData = new FormData();
          _formData.append("image", file);
          //save picture
          console.log(file, _formData)
          this.productApi.uploadImage(_formData).subscribe((res) => {
            let data = res.data[0]
            console.log(res)
            if (data.url) {
              this.file_uploaded.push(data);
            } else {
              this.presentToast("Ooops try to use browser");
            }
            return
          }, err => {
            console.log(err)
          });
        }
      });
  }
  /** Shows a 5s loading overlay while uploads run. */
  async presentLoading() {
    const loading = await this.loadingController.create({
      cssClass: "custom-class",
      message: "Please wait...",
      duration: 5000,
    });
    await loading.present();
    const { role, data } = await loading.onDidDismiss();
    // NOTE(review): dismiss() after onDidDismiss() targets an overlay that
    // is already dismissed — likely redundant.
    loading.dismiss()
    console.log("Loading dismissed!");
  }
  /** Assembles the final product payload and saves it via the facade. */
  pushToDatabase() {
    let store = this.userFacade.getCurrentStore();
    let product = {
      productName: this.createProduct.value.productName,
      productDescription: this.createProduct.value.productDescription,
      productImage: this.file_uploaded,
      metaTags: this.productMetatagsForm.value.metaTags,
      features: this.productFeaturesForm.value.features,
      in_stock: this.createProduct.value.in_stock,
      productPrice: this.createProduct.value.productPrice,
      bill_of_material: this.bill_of_material,
      selectCategory: this.choose_category,
      store_id: store._id,
    };
    //save produc
    console.log(product)
    this.productFacade.createProduct(product);
    ///redirect to products
    this.presentToast("product successfully uploaded");
    //
    console.log(product)
    //this.router.navigate["/store/"]
    setTimeout(() => {
      location.reload()
    }, 1000)
  }
  /***************Select Category ***********************/
  //category selected
  // Groups the chosen category's list by "Categories" into sub_category.
  onChange(event) {
    //get event
    let category_name = event;
    //compare and assign category list
    this.categories$.subscribe(
      (res) => {
        let category = res.filter((item) => {
          return item.name == category_name;
        });
        let sub_cat = category[0].list;
        // loop through category list and group
        var groupBy = function (xs, key) {
          return xs.reduce(function (rv, x) {
            (rv[x[key]] = rv[x[key]] || []).push(x);
            return rv;
          }, {});
        };
        // Group by subcategor
        let grouped = groupBy(sub_cat, "Categories");
        this.sub_category = grouped;
      },
      (err) => {
        console.log(err);
      }
    );
  }
  // Groups the chosen sub-category by "Product segment".
  onChangeSub(event) {
    let sub_name = event;
    //check if it exists and pass
    let arr = this.sub_category[sub_name];
    // loop through category list and group
    var groupBy = function (xs, key) {
      return xs.reduce(function (rv, x) {
        (rv[x[key]] = rv[x[key]] || []).push(x);
        return rv;
      }, {});
    };
    // Group by sub segment categor
    let grouped = groupBy(arr, "Product segment");
    this.sub_segment_category = grouped;
  }
  /*onChangeSub(event) {
  }*/
  // Groups the chosen product segment by "Product type".
  onChangeCatSeg(event) {
    let sub_name = event;
    let new_sub = [];
    //check if it exists and pass
    if (this.sub_segment_category.hasOwnProperty(sub_name)) {
      new_sub.push(this.sub_segment_category[sub_name]);
    }
    // loop through category list and group
    var groupBy = function (xs, key) {
      return xs.reduce(function (rv, x) {
        (rv[x[key]] = rv[x[key]] || []).push(x);
        return rv;
      }, {});
    };
    // Group by sub segment categor
    let grouped = groupBy(new_sub[0], "Product type");
    this.product_types = grouped;
  }
  // NOTE(review): the unit assignment is commented out, so selecting a
  // product type currently has no effect beyond the loop itself.
  onChangeProductType(event) {
    let p_type = event;
    let arr = this.product_types[p_type];
    for (let x of arr) {
      if (x["Product type"] == p_type) {
        //this.choose_category.value.unit = x.Unit;
      }
    }
  }
  /**
   * Records one level of the category drill-down, hides the picker for that
   * level, and triggers loading of the next level's options.
   */
  set(setThis, value) {
    switch (setThis) {
      case "category": {
        this.choose_category.category = value;
        this.show_this_selected_categories.push(value);
        this.category_view = false;
        this.onChange(value);
        break;
      }
      case "sub_category": {
        this.choose_category.sub_category = value;
        this.show_this_selected_categories.push(value);
        this.sub_category_view = false;
        this.onChangeSub(value);
        break;
      }
      case "sub_segment_category": {
        this.choose_category.product_segment = value;
        this.show_this_selected_categories.push(value);
        this.sub_segment_category_view = false;
        this.onChangeCatSeg(value);
        break;
      }
      case "product_types": {
        this.choose_category.product_type = value;
        this.show_this_selected_categories.push(value);
        this.product_types_view = false;
        this.onChangeProductType(value);
        break;
      }
      default: {
        //statements;
        break;
      }
    }
  }
  /** Resets the entire category drill-down back to the first level. */
  clearCategories() {
    this.category_view = true;
    this.sub_category = null;
    this.sub_category_view = true;
    this.sub_segment_category = null;
    this.sub_segment_category_view = true;
    this.product_types = null;
    this.product_types_view = true;
    this.show_this_selected_categories = ["clear"];
  }
  /***************Category Ends***********************/
  // Back button: leave CSV import first, otherwise tell the parent to show
  // the products segment.
  onClickBack() {
    if (this.import) {
      this.import = false;
    } else {
      this.segment.emit("products");
    }
  }
  async presentToast(data) {
    const toast = await this.toastController.create({
      message: data,
      duration: 2000,
      position: "bottom",
    });
    toast.present();
  }
  /** Syncs the slide counter and next/back button visibility. */
  slideChange(event) {
    this.slides.getActiveIndex().then((data) => {
      this.slides_number = data + 1;
      if (data == 5) {
        this.showNext = false;
        this.showBack = true;
      } else if (data == 0) {
        this.showBack = false;
        this.showNext = true;
      } else {
        this.showNext = true;
        this.showBack = true;
      }
    });
    this.slides.lockSwipes(true);
  }
  // Swipes are locked by default; unlock just long enough to advance.
  next() {
    this.slides.lockSwipes(false);
    this.slides.slideNext();
  }
  slidesBack() {
    this.slides.lockSwipes(false);
    this.slides.slidePrev();
  }
  /** Toggles the cropper aspect-ratio button styling. */
  aspectSelect(data) {
    if (data == "large") {
      this.defaultR = "outlined"
      this.oneone = "normal"
      this.small = true
    } else if (data == "small") {
      this.oneone = 'outlined'
      this.defaultR = "normal"
      this.small = false
    }
  }
  /** Toggles which picture is marked for removal. */
  removePic(data) {
    if (data == this.pic_remove) {
      this.pic_remove = null
    } else {
      this.pic_remove = data
    }
  }
  yesAddBillOfMaterial() {
    this.addBillOfMaterial = false
  }
  /** Selects (or clears) the bill-of-material type and adjusts navigation. */
  selectBom(data) {
    switch (data) {
      case "extra": {
        this.BOM_type = "Extra"
        this.bill_type_selected = true
        this.showNext = false
        this.showBack = false
        break;
      }
      case "material": {
        this.BOM_type = "material"
        this.bill_type_selected = true
        this.showNext = false
        this.showBack = false
        break;
      }
      case "clear": {
        //statements;
        this.BOM_type = null
        this.bill_type_selected = false
        this.selected_extra = false
        this.material_substitude = false
        this.showNext = true
        this.showBack = true
        break;
      }
    }
  }
  /** Explains the "base material price" field to the user. */
  async presentAlertMultipleButtons() {
    console.log("alert")
    const alert = await this.alertController.create({
      cssClass: 'my-custom-class',
      header: 'Base Material Price',
      message: 'Is the price of the standard material that this component will replace',
      buttons: ['Cancel']
    });
    await alert.present();
  }
  ngOnDestroy() {
    console.log('Items destroyed');
  }
}
<file_sep>import { Component, OnInit, ViewChild } from "@angular/core";
import { HeaderStateService } from "src/app/core-modules/services/header/header-state/header-state.service";
import { IonSlides } from "@ionic/angular";
import { ToastController } from "@ionic/angular";
import { Location } from "@angular/common";
import { UserFacadeService } from "src/app/core-modules/services/profile/profile-facade/profile-facade.service";
import { Router } from "@angular/router";
import {BreakpointObserver, Breakpoints} from '@angular/cdk/layout';
@Component({
selector: "app-return-policy",
templateUrl: "./return-policy.page.html",
styleUrls: ["./return-policy.page.scss"],
})
export class ReturnPolicyPage implements OnInit {
  @ViewChild("check_slides", { static: true }) slides: IonSlides;
  /** Question currently being typed by the store owner. */
  public current_question = "";
  /** Questions/policy lines collected so far. */
  public added_question: any = [];
  store_id;
  device_screen;
  /** Preset reasons a customer may pick when requesting a return. */
  public return_reason = [
    "Damaged Goods",
    "Wrong Order Delivered",
    "SCAM",
    "size variation ",
  ];
  constructor(
    private headerState: HeaderStateService,
    public toastController: ToastController,
    private _location: Location,
    private router: Router,
    private userFacade: UserFacadeService,
    breakpointObserver: BreakpointObserver,
  ) {
    // Classify the viewport once per matching breakpoint change.
    const screenBuckets: Array<[string, string]> = [
      [Breakpoints.Handset, "Handset"],
      [Breakpoints.Tablet, "Tablet"],
      [Breakpoints.Web, "Web"],
    ];
    for (const [query, label] of screenBuckets) {
      breakpointObserver.observe([query]).subscribe((state) => {
        if (state.matches) {
          this.device_screen = label;
        }
      });
    }
  }
  /** Reads the active store id persisted by a previous screen. */
  ngOnInit() {
    this.store_id = localStorage.getItem("st");
  }
  /** Advances the slide deck and hints at the swipe-back gesture. */
  next(slider) {
    slider.slideNext();
    this.presentToast("swipe to go back");
  }
  async presentToast(data) {
    const toast = await this.toastController.create({
      message: data,
      duration: 3000,
      position: "bottom",
    });
    toast.present();
  }
  /** Navigates to the previous page in browser history. */
  backClicked() {
    this._location.back();
  }
  /** Appends the current question to the list and clears the input. */
  addQuestion() {
    const question = this.current_question;
    this.added_question.push(question);
    this.current_question = "";
  }
  /** Persists the collected policy lines, then returns to the store view. */
  addReturnPolicy() {
    const payload = {
      return_policy: this.added_question,
      store_id: this.store_id,
    };
    this.userFacade.updateStoreProfile(payload);
    //complete
    this.router.navigate(["/admin-store/store"]);
  }
}
<file_sep>import { Component, OnInit, ViewChild } from "@angular/core";
import { FormGroup, FormControl, FormControlName } from "@angular/forms";
import { IonSlides } from "@ionic/angular";
import { ToastController } from "@ionic/angular";
import { Router } from "@angular/router";
import { AuthApiService } from "src/app/core-modules/services/auth/auth-api/auth-api.service";
import { Data } from "./za";
import { HeaderStateService } from "src/app/core-modules/services/header/header-state/header-state.service";
import { UserFacadeService } from 'src/app/core-modules/services/profile/profile-facade/profile-facade.service';
@Component({
selector: "app-update-info",
templateUrl: "./update-info.page.html",
styleUrls: ["./update-info.page.scss"],
})
export class UpdateInfoPage implements OnInit {
  /**
   * Two-slide profile update page: slide 1 collects the user's name, slide 2
   * their address; both form values are merged into one payload on submit.
   */
  @ViewChild("UpdateinfoSlides", { static: true }) slides: IonSlides;
  // Slide 1: personal names.
  profileFormSlide1 = new FormGroup({
    first_name: new FormControl(""),
    last_name: new FormControl(""),
  });
  // Slide 2: nested address group.
  profileForm = new FormGroup({
    address: new FormGroup({
      unit: new FormControl(""),
      street_name: new FormControl(""),
      complex: new FormControl(""),
      suburb: new FormControl(""),
      city: new FormControl(""),
      province: new FormControl(""),
      zip: new FormControl(""),
    }),
  });
  public Data: any;
  // Route to navigate back to after a successful update (from header state).
  public return_to;
  showNext = true;
  showBack = false;
  constructor(
    public toastController: ToastController,
    private router: Router,
    private authApi: AuthApiService,
    private headerState: HeaderStateService,
    private userFacade: UserFacadeService
  ) { }
  /** Shows a short bottom toast with the given message. */
  async presentSaleToast(data) {
    const toast = await this.toastController.create({
      message: data,
      position: "bottom",
      animated: true,
      duration: 2000,
    });
    toast.present();
  }
  ngOnInit() { }
  /**
   * Merges both slides' form values and submits them to the auth API; on
   * success reloads the user profile and navigates either to the stored
   * return route or to the profile page.
   */
  onSubmit() {
    this.return_to = this.headerState.return_route;
    console.log(this.profileForm.value, this.profileFormSlide1.value);
    let new_form = {
      ...this.profileForm.value,
      ...this.profileFormSlide1.value,
    };
    this.authApi.userUpdate(new_form).subscribe(
      (res) => {
        this.userFacade.loadUser()
        this.presentSaleToast("Info Updated and ready to get started");
        setTimeout(() => {
          if (this.return_to) {
            this.router.navigate([this.return_to])
          } else {
            this.router.navigate(["/user/user_profile"]);
          }
        }, 1500);
      },
      (err) => {
        console.log(err);
        this.presentSaleToast("Error!");
      }
    );
  }
  /** Advances to the address slide. */
  next() {
    this.slides.slideNext();
    this.showNext = false;
    this.showBack = true;
  }
  /** Returns to the name slide. */
  slidesBack() {
    this.showNext = true;
    this.showBack = false;
    this.slides.slidePrev();
  }
}
<file_sep>import { Injectable } from "@angular/core";
import { EnvService } from "src/app/env.service";
import { HttpClient } from "@angular/common/http";
@Injectable({
  providedIn: "root",
})
export class ProductApiService {
  // Backend endpoints for product queries and updates.
  _urlGetProduct = `${this.env.apiUrl}/api/product/view_product`;
  _urlgetSegmentProducts = `${this.env.apiUrl}/api/product/view_segment_products`;
  _urlUpdateLikeProduct = `${this.env.apiUrl}/api/product/update_like`;
  _urlGetUserLikedProducts = `${this.env.apiUrl}/api/product/get_user_liked_products`;
  _urlViewStoreProductsById = `${this.env.apiUrl}/api/product/view_vendor_products`;
  _urlViewProducts = `${this.env.apiUrl}/api/product/view_all_products`;
  _urlUpdateReviewProduct = `${this.env.apiUrl}/api/product/update_review`;
  _urlGetUserReviews = `${this.env.apiUrl}/api/product/get_user_reviews`;

  constructor(private httpClient: HttpClient, private env: EnvService) {}

  /** Fetch a single product. */
  getProduct(data) {
    return this.httpClient.post<any>(this._urlGetProduct, data);
  }

  /** Fetch products for a segment (e.g. a category slice). */
  getSegmentProducts(data) {
    return this.httpClient.post<any>(this._urlgetSegmentProducts, data);
  }

  /** Fetch every product. */
  getProducts() {
    return this.httpClient.get<any>(this._urlViewProducts);
  }

  /** Fetch the products of one vendor/store. */
  getStoreProductsById(id) {
    return this.httpClient.post<any>(this._urlViewStoreProductsById, id);
  }

  /** Toggle/record a like on a product. */
  updateLike(data) {
    return this.httpClient.post<any>(this._urlUpdateLikeProduct, data);
  }

  /** Fetch the current user's liked products. */
  getUserlikedProducts() {
    return this.httpClient.get<any>(this._urlGetUserLikedProducts);
  }

  /** Create or update a product review. */
  updateReview(data) {
    return this.httpClient.post<any>(this._urlUpdateReviewProduct, data);
  }

  /** Fetch the current user's reviews. */
  getUserReviews() {
    return this.httpClient.get<any>(this._urlGetUserReviews);
  }
}
<file_sep>const mongoose = require("mongoose");
// Order-receipt schema: a record of a paid order, its items and delivery
// details. (The original header said "User Schema" in error.)
const Schema = mongoose.Schema;

const Order_ReceiptSchema = new Schema({
  // Merchant payment reference — presumably the PayFast m_payment_id used
  // elsewhere in this codebase; confirm against the payment flow.
  m_payment_id: {
    type: String,
    required: true
  },
  // Human-facing order reference.
  order_number: {
    type: String,
    required: true
  },
  // Payment outcome as reported by the gateway.
  payment_status: {
    type: String,
    required: true
  },
  // Email of the purchasing user.
  user_email: {
    type:String,
    required: true
  },
  // Store the order was placed with.
  store_id: {
    type:String,
    required: true
  },
  // Optional — absent for collection-only orders, presumably; verify.
  delivery_address: {
    type:String,
    required: false
  },
  // Line items purchased (shape of each entry not constrained here).
  items: {
    type: Array,
    required: true
  },
  total_price: {
    type: Number,
    required: true
  },
  // Fulfilment status of the order.
  status: {
    type: String,
    required: true
  },
  // Creation timestamp; defaults to now.
  date: {
    type: Date,
    default: Date.now
  }
});

// Order-receipt model (collection name derives from 'order_Receipt').
const Order_Receipt = mongoose.model('order_Receipt', Order_ReceiptSchema);

module.exports = Order_Receipt;
<file_sep>import { Component, OnInit } from '@angular/core';
import { Router, ActivatedRoute } from '@angular/router';
import { AuthApiService } from 'src/app/core-modules/services/auth/auth-api/auth-api.service';
@Component({
  selector: 'app-verify',
  templateUrl: './verify.page.html',
  styleUrls: ['./verify.page.scss'],
})
export class VerifyPage implements OnInit {
  // Verification token extracted from the URL query string.
  token: string;
  // Result message rendered by the template.
  message: string;

  constructor(
    private _authService: AuthApiService ,
    private router: Router,
    private activatedRoute: ActivatedRoute
  ) { }

  ngOnInit() {
    // Read the token from the query string, then verify the account.
    // Fix: the verify call previously ran OUTSIDE the subscription, so it
    // could fire with `token` still undefined if the params emitted
    // asynchronously; it now runs once the token is actually available.
    this.activatedRoute.queryParams.subscribe(
      params => {
        this.token = params['token'];
        //User Verified?
        this._authService.verifyUser({token :this.token}).subscribe(
          res => {
            this.message = res.msg;
          },
          error => console.error('Error', error)
        )
      }
    )
  }
}
<file_sep>import { Component, OnInit, ViewChild } from "@angular/core";
import { FormControl, FormGroup } from "@angular/forms";
import { ProfileApiService } from "src/app/core-modules/services/profile/profile-api/profile-api.service";
import { Md5 } from "ts-md5/dist/md5";
import { IonSlides } from "@ionic/angular";
import { AuthStateService } from "src/app/core-modules/services/auth/auth-state/auth-state.service";
import { PayfastFacadeService } from "src/app/core-modules/services/payments/payfast-facade/payfast-facade.service";
import { Location } from "@angular/common";
import { PickerController } from "@ionic/angular";
import { PickerOptions } from "src/app/models/picker-model";
import {ToastService} from "src/app/core-modules/services/toast/toast.service";
import { UserFacadeService } from 'src/app/core-modules/services/profile/profile-facade/profile-facade.service';
import { AlertController } from '@ionic/angular';
import { Router } from '@angular/router';
import {BreakpointObserver, Breakpoints} from '@angular/cdk/layout';
// Pricing / license-selection page. Walks a store owner through picking
// product and user counts, recommends matching licenses, confirms a
// billing cycle, and hands off to the PayFast payment flow.
@Component({
  selector: "app-pricing",
  templateUrl: "./pricing.page.html",
  styleUrls: ["./pricing.page.scss"],
})
export class PricingPage implements OnInit {
  @ViewChild("userSlides") slides: IonSlides;
  //Vars
  payfastForm = {};
  md5 = new Md5();
  // Product/user counts chosen in the ion-picker (string values, may be "unlimited").
  number_products ;
  number_users;
  // License card currently highlighted ("none" clears the highlight).
  license_focus ;
  // "yearly" or "mon" — set when a billing cycle is confirmed.
  billing_cycle_selected;
  showBack;
  // 1-based index of the active slide.
  slides_number;
  // All premade licenses fetched from the backend.
  licenses;
  // Licenses other than the recommended one(s).
  other_licenses;
  // "Handset" | "Tablet" | "Web" — set by the breakpoint observers below.
  device_screen
  current_store;
  paynow = false
  show_licenses;
  // Licenses matching the chosen product/user counts.
  recommended_license_index:any = [];
  // License the user has committed to buy.
  selected_license;
  //Form
  pricingForm = new FormGroup({
    products_number: new FormControl(""),
  });

  constructor(
    private toastService: ToastService,
    private userApi: ProfileApiService,
    private payFacade: PayfastFacadeService,
    private _location: Location,
    private userFacade: UserFacadeService,
    private authState: AuthStateService,
    private pickerCtrl: PickerController,
    public alertController: AlertController,
    private router: Router,
    breakpointObserver: BreakpointObserver,
  ) {
    // Track the device class so the template can adapt its layout.
    breakpointObserver.observe([
      Breakpoints.Handset
    ]).subscribe(result => {
      if (result.matches) {
        this.device_screen = "Handset"
      }
    });
    breakpointObserver.observe([
      Breakpoints.Tablet
    ]).subscribe(result => {
      if (result.matches) {
        this.device_screen = "Tablet"
      }
    });
    breakpointObserver.observe([
      Breakpoints.Web
    ]).subscribe(result => {
      if (result.matches) {
        this.device_screen = "Web"
      }
    });
  }

  ionViewDidEnter() {
    // Slides advance only via the buttons; swiping is disabled.
    this.slides.lockSwipes(true);
  }

  ngOnInit() {
    this.showBack = false;
    this.current_store = this.userFacade.getCurrentStore();
    // Load the available license tiers.
    this.authState.getPremadeLicenses().subscribe(
      (res) => {
        this.licenses =res
      },
      (err) => {
        console.log(err);
      }
    );
    this.show_licenses = false;
  }

  //Lisence is selected and assigned
  // Attaches the current store and a payed_amount block (amount, cycle type,
  // end date) to the license, then asks the user to confirm the cycle.
  // NOTE: a "month" is approximated as 30 days and a "year" as 360 days here.
  postLicense(license){
    let store = this.userFacade.getCurrentStore();
    license.store_id = store._id
    let today_ms = Date.now()
    let one_day_ms = 86400000
    let one_month_ms = one_day_ms * 30
    let one_year_ms = one_day_ms * 30 * 12
    let month_end_period = today_ms + one_month_ms
    let year_end_period = today_ms + one_year_ms
    if( this.billing_cycle_selected =="yearly" ){
      license.payed_amount = {amount: license.annual_price , type: "Yearly", end_date: year_end_period }
    }else{
      license.payed_amount = {amount: license.monthly_price , type: "Monthly", end_date: month_end_period}
    }
    //assign license
    this.selected_license = license
    // Confirm the cycle; if the user clicked a card without first picking a
    // cycle, default to monthly.
    if(this.billing_cycle_selected == "yearly" && this.license_focus == license.license_name){
      this.presentAlertConfirmYearly()
    }else if(this.billing_cycle_selected == "mon" && this.license_focus == license.license_name){
      this.presentAlertConfirmMonth()
    }else{
      this.presentAlertConfirmMonth()
      this.billing_cycle_selected = "mon"
    }
  }

  //Send form to payfast
  onSub(info) {
    let license = this.selected_license;
    this.payFacade.temporaryOrder(info, license);
  }

  // Navigate to the payment page, passing user email and license selection
  // through router state.
  payfastNav(){
    this.userApi.getUser().subscribe(
      (res) => {
        this.router.navigate(["payment"], {
          state: {
            user_email: res.user.email,
            selected_license: this.selected_license,
            billing_cycle_selected: this.billing_cycle_selected }
        });
      }, err => {
        console.log(err)
      })
  }

  // Generate a 10-character alphanumeric reference string.
  // NOTE(review): uses Math.random — not suitable for anything
  // security-sensitive; confirm intended use.
  randomize() {
    // Randomize chars
    let length = 10;
    let chars =
      "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
    var result = "";
    for (var i = length; i > 0; --i)
      result += chars[Math.floor(Math.random() * chars.length)];
    return result;
  }

  backClicked() {
    this._location.back();
  }

  // Advance one slide (temporarily unlocking swipes) and scroll to the top.
  next() {
    this.slides.lockSwipes(false);
    this.slides.slideNext();
    this.slides.lockSwipes(true);
    window.scroll(0, 0);
  }

  back() {
    this.slides.lockSwipes(false);
    this.slides.slidePrev();
    this.slides.lockSwipes(true);
  }

  // Show the products/users picker; on dismiss, validate the choices and
  // reveal the recommended licenses.
  async showBasicPicker(slides) {
    // this.slides = slides
    let opts: PickerOptions = {
      buttons: [
        {
          text: "cancel",
          role: "cancel",
        },
        {
          text: "done",
        },
      ],
      columns: [
        {
          name: "products",
          options: [
            { text: "No Products", value: "default" },
            { text: "0 - 5", value: "5" },
            { text: "5 - 25", value: "25" },
            { text: "25 - 50", value: "50" },
            { text: "50 - 75", value: "75" },
            { text: "75 - 500", value: "500" },
            { text: "500 - 2500", value: "2500" },
            { text: "2500 - 5000", value: "5000" },
            { text: "unlimited", value: "unlimited" },
          ],
        },
        {
          name: "users",
          options: [
            { text: "No Users", value: "default" },
            { text: "1", value: "1" },
            { text: "2", value: "2" },
            { text: "2 - 5", value: "5" },
            { text: "5 - 20", value: "20" },
            { text: "20 - 40", value: "40" },
            { text: "40 - 100", value: "100" },
            { text: "100 - 200", value: "200" },
            { text: "unlimited", value: "unlimited" },
          ],
        },
      ],
    };
    let picker = await this.pickerCtrl.create(opts);
    picker.present();
    picker.onDidDismiss().then(async (data) => {
      let col = await picker.getColumn("products");
      let col1 = await picker.getColumn("users");
      this.number_products = col.options[col.selectedIndex].value;
      this.number_users = col1.options[col1.selectedIndex].value;
      console.log(this.number_products, this.number_users);
      // "default" means the user never moved the wheel off the placeholder.
      if(this.number_products == 'default'){
        this.toastService.presentToast("select Your Number of Products")
      }else if(this.number_users=="default"){
        this.toastService.presentToast("select your Number of Users")
      }else{
        this.next();
        this.outPutLicense();
        this.show_licenses = true;
        this.toastService.presentToast("Slide to see other licenses")
        // Highlight the first recommendation briefly, then prompt for a cycle.
        this.license_focus = this.recommended_license_index[0].license_name
        setTimeout(() => {
          this.license_focus ="none"
          this.toastService.presentToast("Select billing cycle by pressing /mon or /Year")
        }, 3000);
      }
    });
  }

  // Split `licenses` into recommended (matching either chosen count) and
  // "other" licenses. At most the first two recommendations are excluded
  // from the "other" list.
  outPutLicense(){
    let y;
    this.recommended_license_index = this.licenses.filter(
      license => {
        return license.number_of_products == this.number_products || license.number_of_users == this.number_users
      }
    )
    y = this.licenses.filter(
      license =>{
        return license !== this.recommended_license_index[0] //|| license !== this.recommended_license_index[1]
      }
    )
    if( this.recommended_license_index.length > 1){
      this.other_licenses = y.filter(
        license =>{
          return license !== this.recommended_license_index[1]
        }
      )
    }else{
      this.other_licenses = y
    }
    console.log(this.other_licenses)
    console.log(this.recommended_license_index)
    //for (let index = 0; index < this.licenses.length; index++) {
    //  if(this.number_products == this.licenses[index].number_of_products || this.number_users == this.licenses[index].number_of_users){
    //    this.recommended_license_index.push(this.licenses[index])
    //y.push(index)
    //  }
    // }
    //for( let x = 0; x < y.length; x++){
    //  this.licenses.splice(y[x],1)
    // }
  }

  // Keep slides_number and the back button in sync with the active slide.
  slideChange(event) {
    this.slides.getActiveIndex().then((data) => {
      this.slides_number = data + 1;
      if ( this.slides_number == 1) {
        this.showBack = false;
      } else if( this.slides_number > 1){
        this.showBack = true;
      }
    });
  }

  // Confirmation dialog for the monthly cycle; on OK, unlock and advance.
  async presentAlertConfirmMonth() {
    const alert = await this.alertController.create({
      cssClass: 'my-custom-class',
      header: 'Confirm Billing Cycle!',
      message: 'You have Chosen to pay Monthly',
      buttons: [
        {
          text: 'Cancel',
          role: 'cancel',
          cssClass: 'secondary',
          handler: (blah) => {
            console.log('Confirm Cancel: blah');
          }
        }, {
          text: 'Okay',
          handler: () => {
            this.paynow = true
            this.slides.lockSwipes(false);
            this.slides.slideNext();
            this.slides.lockSwipes(true);
          }
        }
      ]
    });
    await alert.present();
  }

  // Confirmation dialog for the yearly cycle; on OK, unlock and advance.
  async presentAlertConfirmYearly() {
    const alert = await this.alertController.create({
      cssClass: 'my-custom-class',
      header: 'Confirm Billing Cycle!',
      message: 'You have Chosen to pay Per Year',
      buttons: [
        {
          text: 'Cancel',
          role: 'cancel',
          cssClass: 'secondary',
          handler: (blah) => {
            console.log('Confirm Cancel: blah');
          }
        }, {
          text: 'Okay',
          handler: () => {
            this.paynow = true
            this.slides.lockSwipes(false);
            this.slides.slideNext();
            this.slides.lockSwipes(true);
          }
        }
      ]
    });
    await alert.present();
  }

  // Record the clicked license name and billing cycle ("mon"/"yearly").
  showAnnual(license_name,data){
    this.license_focus = license_name
    this.billing_cycle_selected = data
  }
}
<file_sep>import { Injectable } from "@angular/core";
import { Router } from "@angular/router";
import { ToastController } from "@ionic/angular";
import { OrdersApiService } from "../orders-api/orders-api.service";
import { OrdersStateService } from "../orders-state/orders-state.service";
@Injectable({
  providedIn: "root",
})
export class OrdersFacadeService {
  constructor(
    private orderState: OrdersStateService,
    private orderApi: OrdersApiService,
    private router: Router,
    public toastController: ToastController
  ) {}

  /**
   * Optimistically apply the order change to local state, then persist the
   * status change and re-sync state with the server's copy.
   */
  updateOrder(data) {
    //update order state
    this.orderState.updateOrder(data.data);
    //update database order item status
    const onSuccess = (res) => {
      console.log(res);
      this.orderState.updateOrder(res.order);
    };
    const onError = (err) => {
      // TODO:
      //we need to rollback
      console.log(err);
    };
    this.orderApi.updateOrderStatus(data).subscribe(onSuccess, onError);
  }

  /** Expose the orders stream to components without any projection. */
  getUserOrders$() {
    // here we just pass the state without any projections
    // it may happen that it is necessary to combine two or more streams and expose to the components
    return this.orderState.getOrders$();
  }

  /** Fetch the current user's orders and push them into state. */
  loadUserOrders() {
    this.orderApi.getUserOrders().subscribe(
      (res) => this.orderState.setOrders(res.orders),
      (err) => console.log(err)
    );
  }

  /** Delegate delivery verification to the API layer. */
  verifyOrderDelivery(data) {
    //update order delivery
    return this.orderApi.verifyOrderDelivery(data);
  }

  /** Show a 3.5s secondary-colored toast with the given message. */
  async presentToast(msg) {
    const toast = await this.toastController.create({
      color: "secondary",
      message: msg,
      duration: 3500,
    });
    toast.present();
  }
}
<file_sep>import { TestBed } from "@angular/core/testing";
import { ActivityStateService } from "./activity-state.service";
// Jasmine smoke test: the service must be constructible via Angular's
// TestBed injector with an empty testing module.
describe("ActivityStateService", () => {
  let service: ActivityStateService;

  beforeEach(() => {
    TestBed.configureTestingModule({});
    service = TestBed.inject(ActivityStateService);
  });

  it("should be created", () => {
    expect(service).toBeTruthy();
  });
});
<file_sep>module.exports = {
  // Base URL of the front-end (client) server, used for building links back to the app.
  server: "http://afrob.co.za",
};
<file_sep>import { Component, OnInit } from "@angular/core";
import { CategoryFacadeService } from "src/app/core-modules/services/categories/category-facade/category-facade.service";
import { ProductFacadeService } from "src/app/core-modules/services/products/product-facade/product-facade.service";
import { Router } from "@angular/router";
import { ProductStateService } from "src/app/core-modules/services/products/product-state/product-state.service";
@Component({
  selector: "app-shop-by-department",
  templateUrl: "./shop-by-department.component.html",
  styleUrls: ["./shop-by-department.component.scss"],
})
export class ShopByDepartmentComponent implements OnInit {
  // Stream of product categories rendered by the template.
  categories$;
  // Cycles 1..3 to alternate the colour class of category cards.
  public reset = 1;
  // True while the category list is empty (empty-state flag for the template).
  public data;

  constructor(
    private categoryFacade: CategoryFacadeService,
    private _productFacade: ProductFacadeService,
    private productState: ProductStateService,
    private router: Router
  ) {}

  ngOnInit() {
    //getCategories
    this.categories$ = this.categoryFacade.getCategories$();
    this.categories$.subscribe((res) => {
      this.data = res.length == 0;
    });
  }

  /**
   * Return the CSS colour class for the next category card, cycling through
   * three variants. `i` is unused but kept for template compatibility.
   *
   * Fix: the original switch had unreachable `break` statements after each
   * `return` and no default branch, so any unexpected value of `reset`
   * yielded `undefined`; a default now guarantees a class is returned.
   */
  count(i) {
    this.reset += 1;
    if (this.reset == 4) {
      this.reset = 1;
    }
    switch (this.reset) {
      case 2:
        return " norm";
      case 3:
        return "tertiary";
      case 1:
      default:
        return " secondary";
    }
  }

  //load product categorie
  /** Remember the chosen category and navigate to its product listing. */
  loadCategory(category) {
    localStorage.setItem("current_group", category);
    this._productFacade.loadSegmentProducts(category, "category");
    this.router.navigate(["/landing/all-products"]);
  }

  /** Load every product into both product-state slices and navigate. */
  loadAllProducts() {
    localStorage.setItem("all_p", "all departments");
    this._productFacade.loadProducts();
    this._productFacade.getProducts$().subscribe(
      (res) => {
        this.productState.setMainProducts(res);
        this.productState.setSearchingProducts(res);
        this.router.navigate(["/landing/all-products"]);
      },
      (err) => {}
    );
  }
}
<file_sep>import { Component, OnInit } from '@angular/core';
// Placeholder page for the BOM (bill of materials) listing report; all
// content currently lives in the template.
@Component({
  selector: 'app-bom-listing-report',
  templateUrl: './bom-listing-report.component.html',
  styleUrls: ['./bom-listing-report.component.scss'],
})
export class BOMListingReportComponent implements OnInit {
  constructor() { }

  ngOnInit() {}
}
<file_sep>import { Injectable } from "@angular/core";
import { HttpClient } from "@angular/common/http";
import { EnvService } from "src/app/env.service";
@Injectable({
  providedIn: "root",
})
export class VoucherApiService {
  // Voucher / special-promo endpoints.
  _urlCreateVoucher = `${this.env.apiUrl}/api/voucher/create_voucher`;
  _urlGetVoucher = `${this.env.apiUrl}/api/voucher/retrieve_voucher`;
  _urlGetSpecialPromos = `${this.env.apiUrl}/api/voucher/retrieve_special_promos`;
  _urlRemoveVoucher = `${this.env.apiUrl}/api/voucher/delete_voucher`;
  _urlPostSpecialPromos = `${this.env.apiUrl}/api/voucher/create_special_promos`;
  _urlGetStoreVouchers = `${this.env.apiUrl}/api/voucher/retrieve_store_voucher`;
  _urlGetAllVouchers = `${this.env.apiUrl}/api/voucher/retrieve_all_vouchers`;
  _urlGetStoreVouchersById = `${this.env.apiUrl}/api/voucher/retrieve_all_vouchersById`;
  _urlGetVoucherByNumber = `${this.env.apiUrl}/api/voucher/retrieve_voucher_by_number`;

  constructor(private http: HttpClient, private env: EnvService) {}

  /** Create a new voucher. */
  createVoucher(data) {
    return this.http.post<any>(this._urlCreateVoucher, data);
  }

  /** Fetch one voucher by id. */
  getVoucher(id) {
    console.log(id);
    return this.http.post<any>(this._urlGetVoucher, id);
  }

  /** Fetch a voucher by its voucher number. */
  getVoucherByNumber(voucher_number) {
    return this.http.post<any>(this._urlGetVoucherByNumber, voucher_number);
  }

  /** Fetch every voucher. */
  getAllVouchers() {
    return this.http.get<any>(this._urlGetAllVouchers);
  }

  /** Fetch the current store's vouchers. */
  getStoreVouchers() {
    return this.http.get<any>(this._urlGetStoreVouchers);
  }

  /** Fetch a specific store's vouchers by id. */
  getStoreVouchersById(id) {
    return this.http.post<any>(this._urlGetStoreVouchersById, id);
  }

  /** Fetch all special promotions. */
  getSpecialPromos() {
    return this.http.get<any>(this._urlGetSpecialPromos);
  }

  /** Create special promotions. */
  postSpecialPromos(data) {
    return this.http.post<any>(this._urlPostSpecialPromos, data);
  }

  // NOTE(review): this PUTs to the "retrieve_store_voucher" URL — looks
  // like it should target a dedicated update endpoint; confirm against the
  // backend routes before changing.
  updateVoucher(data) {
    return this.http.put<any>(this._urlGetStoreVouchers, data);
  }

  /** Delete a voucher. */
  deleteVoucher(data) {
    return this.http.post<any>(this._urlRemoveVoucher, data);
  }
}
<file_sep>import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import {MatExpansionModule} from '@angular/material/expansion';
import { IonicRatingModule } from 'ionic4-rating';
//Detail modules
import { DetailsComponent } from './details/details.component';
import { SimilarProductsComponent } from'./similar-products/similar-products.component';
// Feature module bundling the product-detail components (detail view and
// similar-products carousel) so host pages can import them together.
@NgModule({
  declarations: [
    DetailsComponent,
    SimilarProductsComponent,
  ],
  imports: [
    CommonModule,
    IonicRatingModule,
    MatExpansionModule,
  ],
  exports: [
    DetailsComponent,
    SimilarProductsComponent,
  ]
})
export class DetailComponentsModule { }
<file_sep>import { Injectable } from "@angular/core";
import { HttpClient } from "@angular/common/http";
import { EnvService } from "src/app/env.service";
@Injectable({
  providedIn: "root",
})
export class CategoryApiService {
  // Endpoint listing all product categories.
  _urlViewCategory = `${this.env.apiUrl}/api/category/viewCategory`;

  constructor(private httpClient: HttpClient, private env: EnvService) {}

  /** GET the full category list from the backend. */
  getCategories() {
    return this.httpClient.get<any>(this._urlViewCategory);
  }
}
<file_sep>const passport = require('passport');
const GoogleStrategy = require('passport-google-oauth20');
const User = require("../../models/users/Google_auth");
const keys = require('./keys');
// Serialize: store only the user's Mongo id in the session cookie.
passport.serializeUser((user, done) => {
  done(null, user.id);
});

// Deserialize: look the user up by the id stored in the cookie and attach
// the full document to `req.user`.
passport.deserializeUser((id, done) => {
  User.findById({_id:id}).then( user => {
    done(null, user);//attach user properties to req -->access to this page
  }).catch(err=>console.log(err));
});

// Google OAuth2 strategy: on callback, find an existing user by googleId
// or create one, then hand it to serializeUser.
passport.use(new GoogleStrategy({
  //options for googleStrategy
  clientID: keys.google.clientID,
  clientSecret: keys.google.clientSecret,
  callbackURL: "/api/oauth/google/redirect"
}, (accessToken, refreshToken, profile, done) => {
  //passport cb function
  User.findOne({googleId: profile.id}).then((user) => {
    if(user){
      done(null, user); //pass user to serializeUser
    }else{
      new User({
        googleId: profile.id,
        name: profile.displayName,
        // NOTE(review): in passport-google-oauth20, profile.emails[0] is an
        // object ({ value, verified }), not a plain address string — the
        // schema's Mixed type accepts it, but confirm this is intended.
        email: profile.emails[0]
      }).save().then((newuser)=>{
        done(null, newuser); //pass user to serializeUser
      }).catch(err => console.log(err));
    }
  }).catch(err => console.log(err));
})
)
<file_sep>import { TestBed } from "@angular/core/testing";
import { OrdersOperatorService } from "./orders-operator.service";
// Jasmine smoke test: the service must be constructible via Angular's
// TestBed injector with an empty testing module.
describe("OrdersOperatorService", () => {
  let service: OrdersOperatorService;

  beforeEach(() => {
    TestBed.configureTestingModule({});
    service = TestBed.inject(OrdersOperatorService);
  });

  it("should be created", () => {
    expect(service).toBeTruthy();
  });
});
<file_sep>import { NgModule } from "@angular/core";
import { PreloadAllModules, RouterModule, Routes } from "@angular/router";
import { AuthGuard } from "./core-modules/guards/auth/auth.guard";
import { LayoutPage } from "./modules/admin-store/layout/layout.page";
// Top-level application routes; every feature area is lazy-loaded.
// Fix: removed the duplicate "delivery-verification" and "guest" entries —
// the Angular router uses the FIRST matching path, so the later copies
// were unreachable dead config.
const routes: Routes = [
  { path: "", redirectTo: "user", pathMatch: "full" },
  //Auth routes
  {
    path: "auth",
    loadChildren: () =>
      import("./modules/auth/layout/layout.module").then(
        (m) => m.LayoutAuthPageModule
      ),
  },
  //Users routes
  {
    path: "user",
    loadChildren: () =>
      import("./modules/user/landing/landing.module").then(
        (m) => m.LandingPageModule
      ),
  },
  //Admin routes (require an authenticated session)
  {
    path: "admin-store",
    loadChildren: () =>
      import("./modules/admin-store/layout/layout.module").then(
        (m) => m.LayoutPageModule
      ),
    canActivate: [AuthGuard],
  },
  {
    path: "admin/product-addition",
    loadChildren: () =>
      import("./modules/admin-store/slides-layout/slides-layout.module").then(
        (m) => m.SlidesLayoutPageModule
      ),
    canActivate: [AuthGuard],
  },
  {
    path: "vendor-profile/:id",
    loadChildren: () =>
      import("./shared/pages/vendor-profile/vendor-profile.module").then(
        (m) => m.VendorProfilePageModule
      ),
  },
  {
    path: "delivery-verification",
    loadChildren: () =>
      import(
        "./shared/pages/delivery-verification/delivery-verification.module"
      ).then((m) => m.DeliveryVerificationPageModule),
  },
  //payments routes
  {
    path: "buy",
    loadChildren: () =>
      import("./modules/payments/payment/payment.module").then(
        (m) => m.PaymentPageModule
      ),
    canActivate: [AuthGuard],
  },
  {
    path: "guest",
    loadChildren: () =>
      import("./modules/payments/pages/guest/guest.module").then(
        (m) => m.GuestPageModule
      ),
  },
  {
    path: "settings",
    loadChildren: () =>
      import("./modules/admin-store/pages/settings/settings.module").then(
        (m) => m.SettingsPageModule
      ),
  },
  {
    path: "store-selections",
    loadChildren: () =>
      import(
        "src/app/modules/auth/pages/store-selections/store-selections.module"
      ).then((m) => m.StoreSelectionsPageModule),
  },
  // "about" and "policies" intentionally share the same module.
  {
    path: "about",
    loadChildren: () =>
      import("./shared/pages/policies/policies.module").then(
        (m) => m.PoliciesPageModule
      ),
  },
  {
    path: "policies",
    loadChildren: () =>
      import("./shared/pages/policies/policies.module").then(
        (m) => m.PoliciesPageModule
      ),
  },
  {
    path: 'payment',
    loadChildren: () => import('./modules/admin-store/pages/payment/payment.module').then( m => m.PaymentPageModule)
  },
];
// Root router module: registers the application routes and eagerly
// preloads every lazy-loaded module after startup.
@NgModule({
  imports: [
    RouterModule.forRoot(routes, { preloadingStrategy: PreloadAllModules }),
  ],
  exports: [RouterModule],
})
export class AppRoutingModule {}
<file_sep>(function (window) {
window.__env = window.__env || {};
  // API base URL. Alternative environments kept below for convenience:
  //   http://afrob.co.za (production)  |  http://localhost:3000 (local dev)
//window.__env.apiUrl = "http://afrob.co.za";
//window.__env.apiUrl = "http://localhost:3000";
window.__env.apiUrl = "https://cartalist.herokuapp.com";
// Whether or not to enable debug mode
// Setting this to false will disable console output
window.__env.enableDebug = true;
})(this);
<file_sep>import { Component, OnInit } from '@angular/core';
import { ReportFacadeService } from "src/app/core-modules/services/reports/report-facade/report-facade.service"
import { ReportStateService } from 'src/app/core-modules/services/reports/report-state/report-state.service';
// Dashboard widget for choosing which report to display.
@Component({
  selector: 'app-report-selector',
  templateUrl: './report-selector.component.html',
  styleUrls: ['./report-selector.component.scss'],
})
export class ReportSelectorComponent implements OnInit {
  constructor( private reportState: ReportStateService,
    private reportsFacade: ReportFacadeService) { }

  ngOnInit() {
  }

  // Push a report-status update into shared state.
  // NOTE(review): the `data` argument is ignored and a hardcoded object is
  // sent instead — confirm whether `data` was meant to be forwarded.
  gotTo(data){
    let obj = {
      "a": "1"
    }
    this.reportState.updateReportStatus(obj)
  }
}
<file_sep>import { Injectable } from "@angular/core";
import { ImageApiService } from "../image-api/image-api.service";
@Injectable({
  providedIn: "root",
})
export class ImageFacadeService {
  constructor(private images: ImageApiService) {}

  /** Upload an image through the image API. */
  uploadImage(data) {
    return this.images.uploadImage(data);
  }

  /** Remove an image from cloud storage. */
  deleteCloudImage(data) {
    return this.images.deleteCloudImage(data);
  }

  /** Remove a product's image record from Mongo. */
  deleteMongoImage(product) {
    return this.images.deleteMongoImage(product);
  }
}
<file_sep>import { Component, OnInit } from '@angular/core';
import { FormGroup, FormBuilder, Validators } from '@angular/forms';
import { AuthApiService } from 'src/app/core-modules/services/auth/auth-api/auth-api.service';
import { Router } from '@angular/router';
@Component({
  selector: 'app-forgot-password',
  templateUrl: './forgot-password.page.html',
  styleUrls: ['./forgot-password.page.scss'],
})
export class ForgotPasswordPage implements OnInit {
  // Single-field form used to request a password-reset email.
  private resetPasswordForm:FormGroup;

  constructor(
    private formBuilder: FormBuilder,
    private _authService: AuthApiService,
    private router: Router
  ) { }

  ngOnInit() {
    // Email must be present, at most 50 chars, and match the email pattern.
    const emailValidators = Validators.compose([
      Validators.maxLength(50),
      Validators.pattern('^[_A-Za-z0-9-\\+]+(\\.[_A-Za-z0-9-]+)*@[A-Za-z0-9-]+(\\.[A-Za-z0-9]+)*(\\.[A-Za-z]{2,})$'),
      Validators.required,
    ]);
    this.resetPasswordForm = this.formBuilder.group({
      email: ['', emailValidators],
    });
  }

  /** Ask the backend to send a reset link, then return to login with the result message. */
  onSubmit(){
    this._authService.forgotPassword(this.resetPasswordForm.value).subscribe(
      res => {
        this.router.navigate(['/user/login'], {state: {data: res.message}});
      },
      error => console.error('Error: ', error)
    )
  }
}
<file_sep>import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { RouterModule } from '@angular/router';
import { IonicModule } from '@ionic/angular';
import { FormsModule } from '@angular/forms';
import { ReactiveFormsModule } from '@angular/forms';
import { WidgetsnavComponent } from './widgetsnav/widgetsnav.component';
import { UsersComponent } from './widgets/users/users.component';
import {ProgressBarComponent} from './widgets/progress-bar/progress-bar.component';
import { ReportSelectorComponent } from './report-selector/report-selector.component';
import {MatExpansionModule} from '@angular/material/expansion';
import {MatSelectModule} from '@angular/material/select';
import { ChartsModule } from 'ng2-charts';
// Feature module bundling the dashboard widgets (nav, users, progress bar,
// report selector) together with the form/Material/chart modules they need.
@NgModule({
  declarations: [
    WidgetsnavComponent,
    UsersComponent,
    ProgressBarComponent,
    ReportSelectorComponent
  ],
  imports: [
    CommonModule,
    RouterModule,
    IonicModule,
    ReactiveFormsModule,
    FormsModule,
    MatExpansionModule,
    MatSelectModule,
    ChartsModule
  ],
  exports: [
    WidgetsnavComponent,
    UsersComponent,
    ProgressBarComponent,
    ReportSelectorComponent
  ],
  entryComponents: [
  ]
})
export class DashboardComponentsModule { }
<file_sep>import { Component, OnInit, Input, ViewChild } from "@angular/core";
import { Router } from "@angular/router";
import {
FormGroup,
FormControl,
FormBuilder,
Validators,
} from "@angular/forms";
import { ProfileApiService } from "src/app/core-modules/services/profile/profile-api/profile-api.service";
import { ProductApiService } from "src/app/core-modules/services/products/product-api/product-api.service";
import { HeaderStateService } from "src/app/core-modules/services/header/header-state/header-state.service";
import { IonSlides } from "@ionic/angular";
import { Location } from "@angular/common";
import { ToastController } from "@ionic/angular";
import { UserFacadeService } from 'src/app/core-modules/services/profile/profile-facade/profile-facade.service';
import { MenuStateService } from 'src/app/core-modules/services/menus/menu-state/menu-state.service';
@Component({
selector: "app-store-register",
templateUrl: "./store-register.page.html",
styleUrls: ["./store-register.page.scss"],
})
export class StoreRegisterPage implements OnInit {
//later use if google users are allowed to have a store
//GoogleStrategy;
@ViewChild("registerStore", { static: true }) slides: IonSlides;
public business_registered = false;
private companyRegistrationForm: FormGroup;
private productsInfo: FormGroup;
public picture_uploaded: any;
public back;
public backTouser;
public returnUrl;
public showNext;
public showBack;
public out_of_bound;
public slides_number;
public show;
cities;
areas = [];
out_bound: FormGroup;
//addressFormGroup: FormGroup;
setProvince(province) {
let provi = this.south_africa.filter((prov) => {
return prov.province_name === province;
});
console.log(provi);
this.cities = provi[0].cities;
}
setArea(city) {
console.log(city);
let found = this.areas.filter((area) => {
return area === city;
});
if (found.length > 0) {
return null;
} else {
this.areas.push(city);
}
console.log(this.areas);
}
clearAreas(i_area) {
let cleared = this.areas.filter((area) => {
return area !== i_area;
});
this.areas = cleared;
}
constructor(
private formBuilder: FormBuilder,
private userApi: ProfileApiService,
private router: Router,
private productApi: ProductApiService,
private headerState: HeaderStateService,
private _location: Location,
public toastController: ToastController,
public menuState: MenuStateService,
private userFacade: UserFacadeService
) { }
ionViewDidEnter() {
this.slides.lockSwipes(true);
}
ngOnInit() {
this.back = this.backFunction();
this.showNext = true;
this.showBack = false;
this.backTouser = this.userBackFunction();
this.returnUrl = this.headerState.return_route;
this.slides.getActiveIndex().then((data) => {
this.slides_number = data + 1;
});
this.companyRegistrationForm = this.formBuilder.group({
name: [
"",
Validators.compose([Validators.minLength(3), Validators.required]),
],
tag_line: [
"",
Validators.compose([Validators.maxLength(60), Validators.required]),
],
email: [
"",
Validators.compose([
Validators.maxLength(320),
Validators.pattern(
"^[_A-Za-z0-9-\\+]+(\\.[_A-Za-z0-9-]+)*@[A-Za-z0-9-]+(\\.[A-Za-z0-9]+)*(\\.[A-Za-z]{2,})$"
),
Validators.required,
]),
],
phone: [
"",
Validators.compose([Validators.minLength(10), Validators.required]),
],
});
this.productsInfo = this.formBuilder.group({
delivery: new FormControl(),
comp_reg_num: new FormControl(),
});
this.out_bound = this.formBuilder.group({
out_bound_fee: new FormControl(),
});
/*this.addressFormGroup = new FormGroup({
address: new FormControl(),
});
this.addressFormGroup
.get("address")
.valueChanges.subscribe((value) => console.log("value changed", value));
*/
}
submit() {
let c_register = {
...this.companyRegistrationForm.value,
logo: this.picture_uploaded,
out_bound_fee: this.out_bound.value.out_bound_fee,
free_delivery: this.areas,
};
this.userApi.storeRegister(c_register).subscribe(
(res) => {
this.userApi.storeProfiling(this.productsInfo.value).subscribe(
(result) => {
console.log(result)
let store = result.store
let user = result.user
this.userFacade.loadUser()
this.menuState.updateMenuStatus("admin");
this.userFacade.loadStoreProfile(store._id);
this.router.navigate(["/admin-store/store"]);
},
(err) => {
this.presentToast("Error! ");
console.log(err);
}
);
},
(err) => this.presentToast("Error! incomplete fields ")
);
}
sendToggleVlaue(data) {
this.business_registered = data.detail.checked;
}
outOfBound(data) {
this.out_of_bound = data.detail.checked;
}
handleFileInput(event) {
let file = event.target.files[0];
if (file.type == "image/jpeg" || file.type == "image/png") {
const _formData = new FormData();
_formData.append("image", file);
//save picture
this.productApi.uploadImage(_formData).subscribe((res) => {
this.picture_uploaded = res.data;
console.log(this.picture_uploaded);
});
}
}
onClickBack() {
this.router.navigate([this.returnUrl]);
}
next() {
this.slides.lockSwipes(false);
this.slides.slideNext();
this.slides.lockSwipes(true);
}
slidesBack() {
this.slides.lockSwipes(false);
this.slides.slidePrev();
this.slides.lockSwipes(true);
}
backFunction() {
if (this.headerState.return_route == "/user/stores") {
return true;
} else {
return false;
}
}
userBackFunction() {
if (this.headerState.return_route == "fromUser") {
return true;
} else {
return false;
}
}
backToUser() {
this._location.back();
}
slideChange(event) {
this.slides.getActiveIndex().then((data) => {
this.slides_number = data + 1;
if (data == 3) {
this.showNext = false;
this.showBack = true;
} else if (data == 0) {
this.showBack = false;
this.showNext = true;
} else {
this.showNext = true;
this.showBack = true;
}
});
}
// Show a short-lived (2s) toast at the bottom of the screen with the given
// message text.
async presentToast(data) {
  const toast = await this.toastController.create({
    message: data,
    duration: 2000,
    position: "bottom",
  });
  toast.present();
}
// Static lookup of South African provinces and their major cities, used to
// populate the free-delivery area selectors during store registration.
// NOTE(review): several entries look misspelled ("Northen Cape", "Giyane",
// "Roodeport", "Hazeyview"); the strings are kept verbatim because they may
// already be persisted or matched elsewhere — confirm before correcting.
public south_africa = [
  {
    province_name: "Limpopo",
    cities: [
      "Polokwane",
      "Giyane",
      "Lebowakgomo",
      "Musina",
      "Phalaborwa",
      "Seshego",
      "Sibasa",
      "Mokopane",
      "Thohoyandou",
      "Thabazimbi",
    ],
  },
  {
    province_name: "Gauteng",
    cities: [
      "Benoni",
      "Boksburg",
      "Brakpan",
      "Carletonville",
      "Germiston",
      "Johannesburg",
      "Krugersdorp",
      "Randburg",
      "Randfontein",
      "Roodeport",
      "Pretoria",
      "Soweto",
      "Springs",
      "Vanderbijlpark",
      "Vereeniging",
    ],
  },
  {
    province_name: "Kwazulu Natal",
    cities: [
      "Durban",
      "Empangeni",
      "Ladysmith",
      "Newcastle",
      "Pietermaritzburg",
      "Pinetown",
      "Ulundi",
      "Umlazi",
    ],
  },
  {
    province_name: "North West",
    cities: [
      "Klerksdorp",
      "Mahikeng",
      "Mmabatho",
      "Potchefstroom",
      "Rustenburg",
    ],
  },
  {
    province_name: "Northen Cape",
    cities: [
      "Kimberley",
      "Kuruman",
      "Port Nolloth",
      "Upington",
      "De Aar",
      "Bellville",
    ],
  },
  {
    province_name: "Western Cape",
    cities: [
      "Bellville",
      "Cape Town",
      "Constantia",
      "George",
      "Hopefield",
      "Oudtshoorn",
      "Paarl",
      "Simon’s Town",
      "Stellenbosch",
      "Swellendam",
      "Worcester",
    ],
  },
  {
    province_name: "Mpumalanga",
    cities: [
      "Emalahleni",
      "Nelspruit",
      "Secunda",
      "Bushbuckridge",
      "Hazeyview",
    ],
  },
  {
    province_name: "Free State",
    cities: [
      "Bethlehem",
      "Bloemfontein",
      "Jagersfontein",
      "Kroonstad",
      "Odendaalsrus",
      "Parys",
      "Phuthaditjhaba",
      "Sasolburg",
      "Virginia",
      "Welkom",
    ],
  },
  {
    province_name: "Eastern Cape",
    cities: [
      "Alice",
      "Butterworth",
      "Jagersfontein",
      "Graaff-Reinet",
      "Grahamstown",
      "Parys",
      "Phuthaditjhaba",
      "King William’s Town",
      "Mthatha",
      "Port Elizabeth",
      "Queenstown",
      "Uitenhage",
      "Zwelitsha",
    ],
  },
];
}
<file_sep>const mongoose = require("mongoose");
// User Schema
const Schema = mongoose.Schema;
// Facebook Schema
// Shape of a user account created through Facebook OAuth.
const FacebookSchema = new Schema({
  // Facebook's own user id — the lookup key for returning users.
  facebookId:{
    type: String,
    required: true
  },
  // Display name supplied by Facebook.
  name: {
    type: String,
    required: true
  },
  // Mixed type because Facebook may not return an email at all
  // (phone-only accounts) — TODO confirm against the OAuth callback.
  email:{
    type: mongoose.Schema.Types.Mixed
  },
  // Account creation timestamp; defaults to insertion time.
  date: {
    type: Date,
    default: Date.now
  }
});
// User model — documents live in the "facebook_users" collection.
const UserFB = mongoose.model('facebook_user', FacebookSchema);
module.exports = UserFB;
<file_sep>import { Component, OnInit } from "@angular/core";
import { UserFacadeService } from "src/app/core-modules/services/profile/profile-facade/profile-facade.service";
import { Router } from "@angular/router";
import { HeaderStateService } from "src/app/core-modules/services/header/header-state/header-state.service";
import { IonSlides } from "@ionic/angular";
import { CartService } from "src/app/core-modules/services/cart/cart-state/cart.service";
@Component({
  selector: "app-checkout",
  templateUrl: "./checkout.page.html",
  styleUrls: ["./checkout.page.scss"],
})
/**
 * Checkout flow page: shows the cart, lets the user step through the
 * checkout slides, and edit or remove cart lines (including combo
 * promotions with a bound secondary product).
 */
export class CheckoutPage implements OnInit {
  // true when a login token is present in localStorage
  public token;
  public path = true;
  // latest user profile emitted by the profile facade
  public profile$;
  // when true, the address editor is shown
  public modify;
  // snapshot of the cart items at page load
  public cart;
  // enables the pay button once the user reaches the final slide
  public paynow;
  // cart line currently being edited (null/undefined when none)
  public edit;
  public showNext;
  public showBack;

  constructor(
    private userFacade: UserFacadeService,
    private router: Router,
    private headerState: HeaderStateService,
    private cartService: CartService
  ) {}

  ngOnInit() {
    this.showNext = true;
    this.showBack = false;
    this.token = !!localStorage.getItem("token");
    this.userFacade.getUser$().subscribe(
      (res) => {
        this.profile$ = res;
      },
      (err) => {
        console.log(err);
      }
    );
    this.cart = this.cartService.getCart();
  }

  // Send the user to the login page, returning to /buy afterwards.
  logIn() {
    this.router.navigate(["/auth"]);
    this.headerState.setReturnRoute("/buy");
  }

  goToGuest() {
    this.router.navigate(["/guest"]);
  }

  editAddress() {
    this.modify = true;
  }

  // Advance the checkout slide and enable payment.
  next(slide) {
    slide.lockSwipes(false);
    slide.slideNext();
    slide.lockSwipes(true);
    this.paynow = true;
  }

  slidesBack(slide) {
    slide.lockSwipes(false);
    slide.slidePrev();
    slide.lockSwipes(true);
  }

  /**
   * Apply `action` to the product and, for combo promotions, to its bound
   * secondary product first. Centralises the lookup that decrease/increase/
   * remove previously each duplicated.
   */
  private withSecondary(product, action: (p: any) => void) {
    if (product.secondary_product) {
      let sec_pro = this.cart.filter((item) => {
        return item._id == product.secondary_product._id;
      });
      action(sec_pro[0]);
    }
    // if any
    action(product);
  }

  decreaseCartItem(product) {
    this.withSecondary(product, (p) => this.cartService.decreaseProduct(p));
  }

  increaseCartItem(product) {
    this.withSecondary(product, (p) => this.cartService.increaseProduct(p));
  }

  removeCartItem(product) {
    this.withSecondary(product, (p) => this.cartService.removeProduct(p));
  }

  // Toggle edit mode for a cart line (clicking the same line closes it).
  edits(product) {
    if (product == this.edit) {
      this.edit = null;
    } else {
      this.edit = product;
    }
  }

  // Highlight color for the line currently being edited.
  selected(data) {
    if (this.edit == data) return "grey";
  }

  // Recompute next/back button visibility when the slide changes.
  slideChange(event, slides) {
    slides.getActiveIndex().then((data) => {
      if (data == 1) {
        this.showNext = false;
        this.showBack = true;
      } else if (data == 0) {
        this.showBack = false;
        this.showNext = true;
      } else {
        this.showNext = true;
        this.showBack = true;
      }
    });
  }
}
<file_sep>import { NgModule } from "@angular/core";
import { Routes, RouterModule } from "@angular/router";
import { AuthGuard } from "src/app/core-modules/guards/auth/auth.guard";
import { LayoutPage } from "./layout.page";
// Route table for the authenticated "user" area. All children render inside
// LayoutPage and are lazy-loaded; session-only pages carry AuthGuard.
const routes: Routes = [
  {
    path: "user",
    component: LayoutPage,
    children: [
      {
        path: "user_profile",
        loadChildren: () =>
          import("src/app/modules/auth/pages/profile/profile.module").then(
            (m) => m.ProfilePageModule
          ),
        canActivate: [AuthGuard],
      },
      {
        path: "return",
        loadChildren: () =>
          import("src/app/modules/auth/pages/profile/returns/returns.module").then(
            (m) => m.ReturnsPageModule
          ),
        canActivate: [AuthGuard],
      },
      {
        path: "login",
        loadChildren: () =>
          import("src/app/modules/auth/pages/login/login.module").then(
            (m) => m.LoginPageModule
          ),
      },
      {
        path: "register",
        loadChildren: () =>
          import("src/app/modules/auth/pages/register/register.module").then(
            (m) => m.RegisterPageModule
          ),
      },
      {
        path: "welcome",
        loadChildren: () =>
          import("src/app/modules/auth/pages/welcome/welcome.module").then(
            (m) => m.WelcomePageModule
          ),
        canActivate: [AuthGuard],
      },
      {
        path: "store-register",
        loadChildren: () =>
          import(
            "src/app/modules/auth/pages/welcome/store-register/store-register.module"
          ).then((m) => m.StoreRegisterPageModule),
        canActivate: [AuthGuard],
      },
      {
        path: "update-info",
        loadChildren: () =>
          import(
            "src/app/modules/auth/pages/welcome/update-info/update-info.module"
          ).then((m) => m.UpdateInfoPageModule),
        canActivate: [AuthGuard],
      },
      {
        path: "google",
        loadChildren: () =>
          import("src/app/modules/auth/pages/google/google.module").then(
            (m) => m.GooglePageModule
          ),
      },
      {
        path: "verify",
        loadChildren: () =>
          import("src/app/modules/auth/shared/verify/verify.module").then(
            (m) => m.VerifyPageModule
          ),
      },
      {
        path: "forgot-password",
        loadChildren: () =>
          import(
            "src/app/modules/auth/shared/forgot-password/forgot-password.module"
          ).then((m) => m.ForgotPasswordPageModule),
      },
      // NOTE(review): this child path repeats the parent segment, producing
      // the URL ".../user/user/reset-password" — confirm whether the extra
      // "user/" prefix is intentional (e.g. matching an emailed link).
      {
        path: "user/reset-password",
        loadChildren: () =>
          import(
            "src/app/modules/auth/shared/reset-password/reset-password.module"
          ).then((m) => m.ResetPasswordPageModule),
      },
      {
        path: "stores",
        loadChildren: () =>
          import(
            "src/app/modules/auth/pages/store-selections/store-selections.module"
          ).then((m) => m. StoreSelectionsPageModule),
      },
      // Default child route: land on the profile page.
      {
        path: "",
        redirectTo: "/auth/user/user_profile",
        pathMatch: "full",
      },
    ],
  },
  // Default top-level route for this feature module.
  {
    path: "",
    redirectTo: "/auth/user/user_profile",
    pathMatch: "full",
  },
];
// Routing module wiring the route table above into the layout feature.
@NgModule({
  imports: [RouterModule.forChild(routes)],
  exports: [RouterModule],
})
export class LayoutPageRoutingModule {}
<file_sep>import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { IonicRatingModule } from 'ionic4-rating';
import { FormsModule, ReactiveFormsModule } from '@angular/forms';
import { OrdersComponent } from "./orders/orders.component";
import { NotificationsComponent} from "./notifications/notifications.component";
import { ProductReviewComponent} from "./product-review/product-review.component";
import { AddReviewComponent} from "./add-review/add-review.component";
import {QueryComponent} from "./orders/query/query.component"
import {SettingsComponent} from "./settings/settings.component";
import {MatExpansionModule} from '@angular/material/expansion';
import { RouterModule } from '@angular/router';
import {MatStepperModule} from '@angular/material/stepper';
// Bundles every profile sub-component (orders, notifications, reviews,
// settings, queries) so the profile page can declare a single import; all
// components are re-exported for use in other feature modules.
@NgModule({
  declarations: [
    OrdersComponent,
    NotificationsComponent,
    SettingsComponent,
    ProductReviewComponent,
    AddReviewComponent,
    QueryComponent
  ],
  imports: [
    CommonModule,
    MatExpansionModule,
    IonicRatingModule,
    FormsModule,
    ReactiveFormsModule,
    RouterModule,
    MatStepperModule
  ],
  exports:[
    OrdersComponent,
    NotificationsComponent,
    SettingsComponent,
    ProductReviewComponent,
    AddReviewComponent,
    QueryComponent
  ]
})
export class ProfileComponentsModule { }
<file_sep>import {
Component,
OnInit,
OnChanges,
Input,
ViewChild,
SimpleChanges,
OnDestroy,
} from "@angular/core";
import { trigger, style, animate, transition } from "@angular/animations";
import { CartFacadeService } from "src/app/core-modules/services/cart/cart-facade/cart-facade.service";
import { ProductFacadeService } from "src/app/core-modules/services/products/product-facade/product-facade.service";
import { HeaderStateService } from "src/app/core-modules/services/header/header-state/header-state.service";
import { Router, ActivatedRoute } from "@angular/router";
import { UserFacadeService } from "src/app/core-modules/services/profile/profile-facade/profile-facade.service";
import { IonSlides, ToastController } from "@ionic/angular";
import { distinctUntilChanged } from "rxjs/operators";
@Component({
  selector: "app-details",
  templateUrl: "./details.component.html",
  styleUrls: ["./details.component.scss"],
  animations: [
    // Details panel slides down from above when it enters the view.
    trigger("inOutAnimation", [
      transition(":enter", [
        style({ transform: "translateY(-45vh)", opacity: 1, color: "white" }),
        animate("1s ease-out", style({ opacity: 1 })),
      ]),
    ]),
  ],
})
/**
 * Product detail panel: shows the current product, its bill of material
 * (optional components the user can add/remove, adjusting the price),
 * ratings, and add-to-cart / vendor navigation actions.
 */
export class DetailsComponent implements OnInit, OnChanges, OnDestroy {
  @Input() product_id: string;
  // whether the full product description is expanded
  public fullDescription = false;
  // observable stream of the product being viewed (assigned in ngOnChanges
  // from the detail_product input)
  public product$;
  // latest snapshot emitted by product$
  public product;
  public cart$;
  public profile;
  // bill-of-material panel visibility
  public seeBOM;
  public has_BOM;
  // NOTE(review): "incuded" is misspelled, but the field is public and
  // presumably template-bound, so the name is preserved.
  public components_incuded: any[] = [];
  public features;
  @Input() detail_product;
  // optional BOM components not currently selected
  public components = [];
  rate;
  public show_properties;
  panelOpenState;
  // expansion-panel CSS classes driven by setStep()
  classZero = "active_expansion";
  classOne = "not_active_expansion";
  classTwo = "not_active_expansion";
  step = 0;
  @ViewChild("productDetailSlide", { static: true }) slides: IonSlides;

  constructor(
    private cartFacade: CartFacadeService,
    private _productFacade: ProductFacadeService,
    private headerState: HeaderStateService,
    private route: Router,
    private activeRoute: ActivatedRoute,
    private userFacade: UserFacadeService,
    public toastController: ToastController
  ) {}

  ngOnChanges(changes: SimpleChanges) {
    console.log(changes);
    // Re-point the local stream at the (possibly new) input product.
    this.product$ = this.detail_product;
    setTimeout(() => {
      console.log("now...");
    }, 1000);
  }

  ngOnInit() {
    this.userFacade.loadUser();
    this.seeBOM = false;
    this.product$.subscribe(
      (res) => {
        this.product = res[0];
        // Split the bill of material (when present) into components already
        // included (selected) and the optional extras.
        if (res[0]) {
          if (res[0].bill_of_material) {
            this.has_BOM = true;
            this.components = res[0].bill_of_material.filter(
              (item) => !item.selected
            );
            this.components_incuded = res[0].bill_of_material.filter(
              (item) => item.selected
            );
          } else {
            this.components_incuded = [];
            this.components = [];
          }
        }
      },
      (err) => {
        console.log(err);
      }
    );
    this.userFacade.getUser$().subscribe(
      (res) => {
        this.profile = res;
      },
      (err) => {
        console.log(err);
      }
    );
    this.show_properties = true;
    // BUG FIX: this callback was a plain `function () {}`, so `this` did not
    // refer to the component and show_properties was never cleared. The
    // arrow function keeps the component context, hiding the overlay after
    // two seconds as intended.
    setTimeout(() => {
      this.show_properties = false;
    }, 2000);
  }

  /**
   * Add an optional BOM component ("chip") to the product. Material chips
   * replace the base material's price; any other chip's price is added on
   * top. The rebuilt product is pushed back through the product facade.
   */
  addBOM(chip, chipID) {
    let new_product = this.product;
    chip.selected = true;
    this.components_incuded.push(chip);
    let c = this.components.filter((item) => {
      return item._id !== chipID;
    });
    this.components = c;
    new_product["bill_of_material"] = [
      ...this.components_incuded,
      ...this.components,
    ];
    // modify product price
    let new_price;
    if (chip.BOM_type == "material") {
      new_price =
        this.product.productPrice -
        parseInt(chip.base_material_price) +
        parseInt(chip.price);
      new_product["modified_price"] = new_price;
      new_product["productPrice"] = new_price;
    } else {
      new_price = this.product.productPrice + parseInt(chip.price);
      new_product["modified_price"] = new_price;
      new_product["productPrice"] = new_price;
    }
    this._productFacade.setCurrentProduct([new_product]);
  }

  showBOM(data) {
    this.seeBOM = data;
  }

  /**
   * Remove a BOM component, reversing the price adjustment made by addBOM:
   * material chips revert to the base material's price; other chips simply
   * subtract their price.
   */
  removeBOM(chip, chipID) {
    let new_product = this.product;
    chip.selected = false;
    this.components.push(chip);
    let c = this.components_incuded.filter((item) => {
      return item._id !== chipID;
    });
    this.components_incuded = c;
    new_product["bill_of_material"] = [
      ...this.components_incuded,
      ...this.components,
    ];
    // modify product price
    let new_price;
    if (chip.BOM_type == "material") {
      new_price =
        this.product.productPrice -
        parseInt(chip.price) +
        parseInt(chip.base_material_price);
      new_product["modified_price"] = new_price;
      new_product["productPrice"] = new_price;
    } else {
      new_price = this.product.productPrice - parseInt(chip.price);
      new_product["modified_price"] = new_price;
      new_product["productPrice"] = new_price;
    }
    this._productFacade.setCurrentProduct([new_product]);
  }

  disProperties(data) {
    this.show_properties = data;
  }

  onRateChange(data) {
    console.log(data);
  }

  // Activate one expansion panel and deactivate the other two.
  setStep(index: number) {
    this.step = index;
    switch (this.step) {
      case 0:
        this.classZero = "active_expansion";
        this.classOne = "not_active_expansion";
        this.classTwo = "not_active_expansion";
        break;
      case 1:
        this.classZero = "not_active_expansion";
        this.classOne = "active_expansion";
        this.classTwo = "not_active_expansion";
        break;
      case 2:
        this.classZero = "not_active_expansion";
        this.classOne = "not_active_expansion";
        this.classTwo = "active_expansion";
        break;
    }
  }

  /**
   * Average star rating across all reviews. Returns 0 for an empty or
   * missing list — previously this divided by zero and yielded NaN.
   */
  rating(ratings) {
    if (!ratings || ratings.length === 0) {
      return 0;
    }
    let total = 0;
    for (let i = 0; i < ratings.length; i++) {
      total += ratings[i].rate;
    }
    return total / ratings.length;
  }

  nextStep() {
    this.step++;
  }

  prevStep() {
    this.step--;
  }

  addToCart(product) {
    this.cartFacade.addToCart(product);
  }

  // Open the vendor's public storefront.
  vendorPage() {
    this.route.navigate(["/vendor-profile/", this.product.storeId]);
  }

  likeA(product) {
    this._productFacade.updateLike(product);
  }

  showFull() {
    this.fullDescription = true;
  }

  showLess() {
    this.fullDescription = false;
  }

  // Short animated toast used as a hint while navigating the detail slides.
  async presentSaleToast(data) {
    const toast = await this.toastController.create({
      message: data,
      position: "bottom",
      animated: true,
      duration: 2000,
    });
    toast.present();
  }

  // Jump two slides forward and hint at how to return.
  next2() {
    this.slides.slideNext();
    this.slides.slideNext();
    this.presentSaleToast("slide to go back");
  }

  next() {
    this.slides.slideNext();
    this.presentSaleToast("slide to see more");
  }

  ngOnDestroy() {
    // Clear the shared current-product state when the panel is torn down.
    this._productFacade.resetCurrentProduct();
  }
}
<file_sep>import { Component, OnInit } from '@angular/core';
import { ChartDataSets, ChartType, RadialChartOptions } from 'chart.js';
import { Label } from 'ng2-charts';
@Component({
  selector: 'app-rader-chart',
  templateUrl: './rader-chart.component.html',
  styleUrls: ['./rader-chart.component.scss'],
})
// Chart.js radar chart wrapper. The labels/data below are demo content;
// the component currently does not accept inputs.
export class RaderChartComponent implements OnInit {
  // Radar
  public radarChartOptions: RadialChartOptions = {
    responsive: true,
  };
  public radarChartLabels: Label[] = ['Eating', 'Drinking', 'Sleeping', 'Designing', 'Coding', 'Cycling', 'Running'];
  public radarChartData: ChartDataSets[] = [
    { data: [65, 59, 0, 81, 56, 55, 40], label: 'Series A' },
    { data: [28, 48, 40, 19, 96, 27, 2], label: 'Series B' }
  ];
  public radarChartType: ChartType = 'radar';
  // Per-series colors. Comments corrected: the rgba values below are teal,
  // gold, and light teal — the earlier "grey/dark grey/red" labels were
  // leftovers from a template.
  public radarColors= [
    { // teal
      backgroundColor: 'rgba(1, 135, 134,0.5)',
      borderColor: 'rgba(1, 135, 134,1)',
      pointBackgroundColor: 'rgba(1, 135, 134,1)',
      pointBorderColor: '#fff',
      pointHoverBackgroundColor: '#fff',
      pointHoverBorderColor: 'rgba(1, 135, 134,0.8)'
    },
    { // gold
      backgroundColor: 'rgba(255, 215, 0,0.5)',
      borderColor: 'rgba(255, 215, 0,1)',
      pointBackgroundColor: 'rgba(255, 215, 0,1)',
      pointBorderColor: '#fff',
      pointHoverBackgroundColor: '#fff',
      pointHoverBorderColor: 'rgba(255, 215, 0,1)'
    },
    { // light teal
      backgroundColor: 'rgba(144, 222, 215,0.5)',
      borderColor: 'rgba(144, 222, 215,1)',
      pointBackgroundColor: 'rgba(144, 222, 215,1)',
      pointBorderColor: '#fff',
      pointHoverBackgroundColor: '#fff',
      pointHoverBorderColor: 'rgba(144, 222, 215,0.8)'
    }
  ];
  constructor() { }
  ngOnInit() {}
  // events
  public chartClicked({ event, active }: { event: MouseEvent, active: {}[] }): void {
    console.log(event, active);
  }
  public chartHovered({ event, active }: { event: MouseEvent, active: {}[] }): void {
    console.log(event, active);
  }
}
<file_sep>import { Injectable } from "@angular/core";
import { BehaviorSubject, Observable } from "rxjs";
@Injectable({
  providedIn: "root",
})
/**
 * In-memory voucher state shared across the app: the accumulated voucher
 * list, the current store's vouchers, voucher products, a total count for
 * pagination, and a busy flag. All state is exposed as RxJS observables.
 */
export class VoucherStateService {
  // true while a voucher API call is in flight
  private updating$ = new BehaviorSubject<boolean>(false);
  // total number of vouchers available server-side (for pagination)
  private vouchers_length$ = new BehaviorSubject<number>(0);
  // accumulated voucher pages
  private vouchers$ = new BehaviorSubject<any[]>([]);
  // products attached to vouchers (user-facing)
  private vouchers_products$ = new BehaviorSubject<any[]>([]);
  // vouchers belonging to the current store
  private store_vouchers = new BehaviorSubject<any[]>([]);
  constructor() {}

  /** Clear all voucher state (e.g. on logout or store switch). */
  resetVouchers() {
    this.vouchers$.next([]);
    this.vouchers_products$.next([]);
    this.vouchers_length$.next(0);
  }

  getStoreVouchers() {
    return this.store_vouchers.asObservable();
  } // return updating$ status
  isUpdating$() {
    return this.updating$.asObservable();
  }

  // change updating$ status
  setUpdating(isUpdating: boolean) {
    this.updating$.next(isUpdating);
  }

  // return vouchers$ state
  getVouchers$() {
    return this.vouchers$.asObservable();
  }

  getVouchersLength$() {
    return this.vouchers_length$.asObservable();
  }

  setStoreVouchers(vouchers) {
    this.store_vouchers.next(vouchers);
  }

  /**
   * Load a new page of vouchers and (optionally) the server-side total.
   * NOTE(review): `voucher` is both emitted as the store_vouchers value and
   * appended to vouchers$ wrapped in an array — confirm callers pass what
   * each consumer expects here.
   */
  setVouchers(voucher, array_length) {
    const currentValue = this.vouchers$.getValue();
    this.store_vouchers.next(voucher);
    // modified voucher product stream
    this.vouchers$.next(currentValue.concat([voucher]));
    if (array_length) {
      this.vouchers_length$.next(array_length);
    }
  }

  // add new voucher to vouchers$ state
  addVoucher(voucher) {
    const currentValue = this.vouchers$.getValue();
    this.vouchers$.next(currentValue.concat([voucher]));
  }

  /**
   * Replace a voucher (matched by id) in vouchers$.
   * BUG FIX: the findIndex callback previously used a braced body without a
   * `return`, so it always yielded -1 and the update landed on index -1;
   * the result was also emitted double-wrapped as [vouchers]. The callback
   * now returns the comparison, the index is guarded, and a shallow copy of
   * the array is emitted.
   */
  updateVoucher(updatedVoucher) {
    const vouchers = this.vouchers$.getValue();
    const indexOfUpdated = vouchers.findIndex(
      (voucher) => voucher.id === updatedVoucher.id
    );
    if (indexOfUpdated !== -1) {
      vouchers[indexOfUpdated] = updatedVoucher;
    }
    this.vouchers$.next([...vouchers]);
  }

  // remove voucher from store_vouchers (matched by reference identity)
  removeVoucher(voucherRemove) {
    const currentValue = this.store_vouchers.getValue();
    this.store_vouchers.next(
      currentValue.filter((voucher) => voucher !== voucherRemove)
    );
  }

  /****voucher products user handle****/
  // return all vouchers_products$ state
  getVouchersProducts$() {
    return this.vouchers_products$.asObservable();
  }

  // Load new set of products
  setVouchersProducts(products) {
    this.vouchers_products$.next(products);
  }
}
<file_sep>import { Injectable } from "@angular/core";
import { HttpClient } from "@angular/common/http";
import { EnvService } from "src/app/env.service";
@Injectable({
  providedIn: "root",
})
/**
 * Thin HTTP client for the activity / notification backend. Each method
 * posts (or gets) against one of the endpoint URLs below and returns the
 * raw observable for the caller to subscribe to.
 */
export class ActivityApiService {
  _urlPushActivity = `${this.env.apiUrl}/api/activities/pushActivity`;
  _urlgetActivities = `${this.env.apiUrl}/api/activities/getActivities`;
  _urlPushNotification = `${this.env.apiUrl}/api/notifications/pushNotification`;
  _urlgetNotifications = `${this.env.apiUrl}/api/notifications/getNotifications`;
  _urlUpdateNotifications = `${this.env.apiUrl}/api/notifications/updateNotifications`;
  // BUG FIX: the path previously contained a double slash
  // ("/api/activities//updateReport"), which a path-matched router would
  // not route to the "/updateReport" handler.
  _urlUpdateReport = `${this.env.apiUrl}/api/activities/updateReport`;
  constructor(private _http: HttpClient, private env: EnvService) {}

  /** Record a new activity entry. */
  pushActivity(data) {
    return this._http.post<any>(this._urlPushActivity, data);
  }

  /** Fetch activities matching the given filter payload. */
  getActivities(data) {
    return this._http.post<any>(this._urlgetActivities, data);
  }

  /** Create a notification. */
  pushNotification(data) {
    return this._http.post<any>(this._urlPushNotification, data);
  }

  /** Fetch the current user's notifications. */
  getNotifications() {
    return this._http.get<any>(this._urlgetNotifications);
  }

  /** Update notification state (e.g. mark as read). */
  updateNotifications(data) {
    return this._http.post<any>(this._urlUpdateNotifications, data);
  }

  /** Update an activity report. */
  updateReports(data) {
    return this._http.post<any>(this._urlUpdateReport, data);
  }
}
<file_sep>import { Component, OnInit } from "@angular/core";
import { ProductFacadeService } from "src/app/core-modules/services/products/product-facade/product-facade.service";
import { FormGroup, FormControl } from "@angular/forms";
import { Router } from "@angular/router";
@Component({
  selector: "app-add-review",
  templateUrl: "./add-review.component.html",
  styleUrls: ["./add-review.component.scss"],
})
/**
 * Small form for rating the currently viewed product. The star rating is
 * captured via the reactive form and submitted through the product facade.
 */
export class AddReviewComponent implements OnInit {
  /** Stream of the product currently being reviewed. */
  public product$;

  /** Star rating plus an (unused for now) free-text comment. */
  reviewForm = new FormGroup({
    rate: new FormControl(""),
    comment: new FormControl(""),
  });

  constructor(
    private router: Router,
    private _productFacade: ProductFacadeService
  ) {}

  ngOnInit() {
    this.product$ = this._productFacade.getCurrentProduct();
  }

  onSubmit() {
    //construct the ratings for backend.
    this.product$.subscribe(
      (products) => {
        const review = {
          _id: products[0]._id,
          rate: this.reviewForm.value.rate,
          comment: "none",
        };
        this._productFacade.updateReview(review);
        //change segment
      },
      (error) => {
        console.log(error);
      }
    );
  }
}
// Sample sales record used as fixture data for the dashboard charts.
// NOTE(review): typed `any` with a placeholder id and a 2020 date — this
// looks like development fixture data; confirm it is not used in production.
<file_sep>export var Data:any = [
{
product_id: 'sjjkdhisjklhdjkhfjsklhdhk',
productName:"bike",
amount: 3,
cost:1215,
discount:12,
category:"food",
date:'2020-06-12T21:02:34.337+00:00'
},
];
];<file_sep>import { Component, OnInit } from "@angular/core";
import { UserFacadeService } from "src/app/core-modules/services/profile/profile-facade/profile-facade.service";
import { Location } from "@angular/common";
import { OrderFacadeService } from "src/app/core-modules/services/orders/order-facade/order-facade.service";
import { ProductFacadeService } from "src/app/core-modules/services/products/product-facade/product-facade.service";
@Component({
  selector: "app-profile",
  templateUrl: "./profile.page.html",
  styleUrls: ["./profile.page.scss"],
})
// User account page: hosts a switchable lower panel (orders, notifications,
// reviews, settings) and summarises open orders and review counts.
export class ProfilePage implements OnInit {
  // which sub-component is shown in the lower half of the page
  public bottom_component = "default";
  // header title; mirrors the active sub-component
  public title;
  //observables
  profile$;
  isUpdating$;
  orderList$;
  // number of orders not yet fulfilled
  order_list;
  productReviews$;
  // number of reviews the user has written
  reviews
  constructor(
    private _productFacade: ProductFacadeService,
    private orderFacade: OrderFacadeService,
    private _location: Location,
    private userFacade: UserFacadeService
  ) {
    ////loading
    this.isUpdating$ = this.userFacade.isUpdating$();
  }
  ngOnInit() {
    this.title = "Account";
    this.userFacade.loadUser();
    //get User profile
    this.userFacade.getUser$().subscribe(
      (res) => {
        this.profile$ = res;
        console.log(res)
      },
      (err) => {
        console.log(err);
      }
    );
    // NOTE(review): this runs before the subscription above has necessarily
    // emitted, so it may log undefined — confirm intent.
    console.log(this.profile$);
    this.orderFacade.loadUserOrders();
    this.orderList$ = this.orderFacade.getUserOrders$();
    // count only the orders that are still open
    this.orderList$.subscribe(
      res => {
        this.order_list = res.filter(order => !order.fullfilled ).length
      }
    )
    this._productFacade.loadUserReviews();
    this.productReviews$ = this._productFacade.getUserReviews();
    this.productReviews$.subscribe(
      res => {
        if(res){
          this.reviews = res.length
        }else{
          this.reviews = 0
        }
      }
    )
  }
  // Go back a page, or — when a sub-component is open — return to the
  // default panel first.
  goBack() {
    if (this.bottom_component === "default") {
      this._location.back();
    } else {
      this.bottom_component = "default";
      this.title = "Account";
    }
  }
  // Switch the lower panel and header title to the named sub-component.
  goTo(data) {
    this.bottom_component = data;
    this.title = data;
  }
  // Child components emit their target panel name through this handler.
  received(data) {
    this.goTo(data);
  }
  // TODO(review): empty handler — implement or remove.
  activeOrders(){
  }
}
<file_sep>import { Component, OnInit } from "@angular/core";
import { ModalController } from "@ionic/angular";
import { Router } from "@angular/router";
import { CartService } from "src/app/core-modules/services/cart/cart-state/cart.service";
import { UserFacadeService } from "src/app/core-modules/services/profile/profile-facade/profile-facade.service";
import { HeaderStateService } from "src/app/core-modules/services/header/header-state/header-state.service";
import { VoucherApiService } from "src/app/core-modules/services/vouchers/voucher-api/voucher-api.service";
import { CartFacadeService } from "src/app/core-modules/services/cart/cart-facade/cart-facade.service";
@Component({
  selector: "app-cart-modal",
  templateUrl: "./cart-modal.component.html",
  styleUrls: ["./cart-modal.component.scss"],
})
/**
 * Cart modal: lists cart lines, applies promotional voucher pricing
 * (item discount, buy-one-get-one, volume, combo), and hands the user off
 * to checkout. Combo promotions bind a secondary product whose cart line
 * must be mutated alongside the primary.
 */
export class CartModalComponent implements OnInit {
  // snapshot of cart items from the cart service
  cart = [];
  // true when a login token exists in localStorage
  token;
  public profile$;
  // toggles the voucher-entry UI
  public apply_voucher;
  constructor(
    private cartService: CartService,
    private modalCtrl: ModalController,
    private router: Router,
    private userFacade: UserFacadeService,
    private headerState: HeaderStateService,
    private voucherAPI: VoucherApiService,
    private cartFacade: CartFacadeService
  ) {}

  ngOnInit() {
    this.cart = this.cartService.getCart();
    this.token = !!localStorage.getItem("token");
    this.userFacade.getUser$().subscribe(
      (res) => {
        this.profile$ = res;
      },
      (err) => {
        console.log(err);
      }
    );
    this.getVoucherProduct();
  }

  /**
   * Fetch a voucher by number and apply its promotion to each attached
   * product. Sales (run_sale) are skipped; only vouchers are applied here.
   * NOTE(review): the voucher number is hard-coded — looks like a leftover
   * from testing; confirm where the real number should come from.
   */
  getVoucherProduct() {
    this.voucherAPI
      .getVoucherByNumber({ voucher_number: "y3WlnoZpfW" })
      .subscribe(
        (res) => {
          console.log(res.voucher);
          res.voucher.products.forEach((p) => {
            let promo = res.voucher;
            //if voucher
            if (!promo.run_sale) {
              let type = promo.type;
              let discount = p.discount;
              let product = p.prod;
              let items_exceeding = p.items_exceeding;
              let quota = p.p_quota;
              let secondary_product = p.s_pro;
              let modified_price;
              let product_bind;
              switch (type) {
                case "itemDiscount":
                  // flat discount off the unit price
                  modified_price = product.productPrice - discount;
                  this.modifyProduct(
                    null,
                    product,
                    modified_price,
                    product.amount,
                    type,
                    null,
                    null
                  );
                  break;
                case "buy1get1free":
                  this.modifyProduct(
                    null,
                    product,
                    product.productPrice,
                    product.amount,
                    type,
                    null,
                    null
                  );
                  break;
                case "volume":
                  // discount applies once the quantity threshold is
                  // exceeded; spread it across those units
                  let sub_total =
                    product.productPrice * items_exceeding - discount;
                  let new_item_price = sub_total / items_exceeding;
                  this.modifyProduct(
                    null,
                    product,
                    new_item_price,
                    items_exceeding,
                    type,
                    null,
                    null
                  );
                  break;
                case "combo":
                  // the discount is taken off the secondary product, and
                  // both lines are bound together so cart mutations stay
                  // in sync
                  product_bind = [product._id, secondary_product._id];
                  modified_price = secondary_product.productPrice - discount;
                  let secondary_product_b = {
                    amount: secondary_product.amount,
                    category: secondary_product.category,
                    productDescription: secondary_product.productDescription,
                    productImage: secondary_product.productImage,
                    productName: secondary_product.productName,
                    productPrice: modified_price,
                    original_price: secondary_product.productPrice,
                    // BUG FIX: was `secondary_product.stosreId` (typo), so
                    // the secondary line's storeId was always undefined.
                    storeId: secondary_product.storeId,
                    sale: secondary_product.sale,
                    sale_type: secondary_product.sale_type,
                    product_bind: product_bind,
                    promo_kind: type,
                    thresh: secondary_product.amount,
                    _id: secondary_product._id,
                  };
                  this.modifyProduct(
                    null,
                    product,
                    product.productPrice,
                    product.amount,
                    type,
                    product_bind,
                    secondary_product_b
                  );
                  this.modifyProduct(
                    true,
                    secondary_product,
                    secondary_product_b.productPrice,
                    secondary_product.amount,
                    type,
                    product_bind,
                    null
                  );
                  break;
              }
            }
          });
        },
        (err) => {
          console.log(err);
        }
      );
  }

  /**
   * Rebuild a product with promotional pricing metadata and push it into
   * the cart via the facade.
   */
  modifyProduct(
    sec,
    product,
    modified_price,
    amount,
    promo_kind,
    product_bind,
    secondary_product
  ) {
    let mProduct = {
      sec: sec,
      amount: amount,
      category: product.category,
      productDescription: product.productDescription,
      productImage: product.productImage,
      productName: product.productName,
      productPrice: modified_price,
      original_price: product.productPrice,
      storeId: product.storeId,
      sale: true,
      sale_type: promo_kind,
      product_bind: product_bind,
      promo_kind: promo_kind,
      secondary_product: secondary_product,
      thresh: amount,
      _id: product._id,
    };
    this.cartFacade.addToCart(mProduct);
  }

  onAmountChange(p, value) {
    if (p.promo || p.sale) {
      console.log(p.amount);
    } else {
      for (let product of this.cart) {
        if (product._id == p._id) {
          console.log(product.amount);
        }
      }
    }
  }

  /**
   * Apply `action` to the product and, for combo promotions, to its bound
   * secondary product first. Centralises the lookup that the three cart
   * mutators previously each duplicated.
   */
  private withSecondary(product, action: (p: any) => void) {
    if (product.secondary_product) {
      let sec_pro = this.cart.filter((item) => {
        return item._id == product.secondary_product._id;
      });
      action(sec_pro[0]);
    }
    // if any
    action(product);
  }

  decreaseCartItem(product) {
    this.withSecondary(product, (p) => this.cartService.decreaseProduct(p));
  }

  increaseCartItem(product) {
    this.withSecondary(product, (p) => this.cartService.increaseProduct(p));
  }

  removeCartItem(product) {
    this.withSecondary(product, (p) => this.cartService.removeProduct(p));
  }

  // Cart total (price * amount per line); cached in localStorage for the
  // payment page.
  getTotal() {
    let total = this.cart.reduce((i, j) => i + j.productPrice * j.amount, 0);
    localStorage.setItem("total", total);
    return total;
  }

  dismiss() {
    this.modalCtrl.dismiss();
  }

  clearItems() {
    localStorage.removeItem("cart");
    this.cart = [];
    this.cartService.clearCart();
  }

  /**
   * Close the modal and continue to checkout. Logged-in users missing
   * name/address details are detoured to the update-info page first.
   */
  checkout(cart) {
    // set timeOut UserExperience
    if (cart.length > 0) {
      setTimeout(() => {
        this.modalCtrl.dismiss();
      }, 1);
      if (
        this.token &&
        !this.profile$.first_name &&
        !this.profile$.last_name &&
        !this.profile$.address
      ) {
        this.headerState.setReturnRoute("/buy");
        this.router.navigate(["/user/update-info"]);
      } else {
        this.router.navigate(["/payment/checkout"]);
      }
    } else {
      this.modalCtrl.dismiss();
    }
    console.log(cart);
  }

  applyVoucher(){
    if(this.apply_voucher){
      //apply vousher please set it back to false
    }else{
      this.apply_voucher = true
    }
  }
}
<file_sep>import {
Component,
OnInit,
ViewChild,
Output,
EventEmitter,
OnChanges,
} from "@angular/core";
import { HostListener } from "@angular/core";
import { IonInfiniteScroll } from "@ionic/angular";
import { distinctUntilChanged } from "rxjs/operators";
import { UserFacadeService } from "src/app/core-modules/services/profile/profile-facade/profile-facade.service";
import { VoucherFacadeService } from "src/app/core-modules/services/vouchers/voucher-facade/voucher-facade.service";
import {BreakpointObserver, Breakpoints} from '@angular/cdk/layout';
@Component({
selector: "app-promo",
templateUrl: "./promo.component.html",
styleUrls: ["./promo.component.scss"],
})
export class PromoComponent implements OnInit, OnChanges {
//notify sale page to change segment
@Output() notifyParent: EventEmitter<any> = new EventEmitter();
public all_vouchers;
no_promo;
user_id;
item_tresh;
device_screen;
scrHeight: any;
scrWidth: any;
desktopViewvoucher ;
expand_detail;
detail_view_product
@ViewChild(IonInfiniteScroll) infiniteScroll: IonInfiniteScroll;
// Track the viewport size; re-runs on every window resize so the
// per-page item threshold computed from scrHeight stays current.
@HostListener("window:resize", ["$event"])
getScreenSize(event?) {
  this.scrHeight = window.innerHeight;
  this.scrWidth = window.innerWidth;
}
//Observables
updating$;
vouchers$;
profile$;
vouchers_length$;
constructor(
  private userFacade: UserFacadeService,
  private voucherFacade: VoucherFacadeService,
  breakpointObserver: BreakpointObserver
) {
  // Map each CDK breakpoint onto a device label; replaces three
  // copy-pasted observe/subscribe blocks with one data-driven loop.
  const breakpointLabels: Array<[string, string]> = [
    [Breakpoints.Handset, "Handset"],
    [Breakpoints.Tablet, "Tablet"],
    [Breakpoints.Web, "Web"],
  ];
  for (const [breakpoint, label] of breakpointLabels) {
    breakpointObserver.observe([breakpoint]).subscribe((result) => {
      if (result.matches) {
        this.device_screen = label;
      }
    });
  }
  this.getScreenSize();
  ////loading
  this.updating$ = this.voucherFacade.isUpdating$();
}
ngOnChanges() {}
ngOnInit() {
this.item_tresh = Math.round(this.scrHeight / 50);
// get user and assign user_id
this.profile$ = this.userFacade.getUser$();
this.user_id = this.profile$._id;
//get voucher length
this.voucherFacade
.getStoreVouchers()
.pipe(distinctUntilChanged())
.subscribe((res) => {
console.log(res.length);
this.vouchers$ = res;
this.vouchers_length$ = res.length;
// get store vouchers and prepare them for infiniteScroll
console.log(res);
if (this.vouchers_length$ == 0) {
this.no_promo = true;
} else {
this.no_promo = false;
if (this.vouchers_length$ > this.item_tresh) {
//check if data us long enough to for infinit scroll
this.all_vouchers = [];
for (let i = 0; i < this.item_tresh; i++) {
this.all_vouchers.push(this.vouchers$[i]);
console.log("infinite scroll active");
}
this.desktopViewvoucher = this.all_vouchers[0]
} else {
this.all_vouchers = this.vouchers$;
this.desktopViewvoucher = this.all_vouchers[0]
console.log("load complete");
}
}
});
console.log(this.all_vouchers);
}
loadMoreItems() {
let z = this.all_vouchers.length - 1;
for (let i = z; i < z + this.item_tresh; i++) {
this.all_vouchers.push(this.vouchers$[i]);
}
return this.all_vouchers;
}
loadData(event) {
setTimeout(() => {
this.loadMoreItems();
event.target.complete();
// App logic to determine if all data is loaded
// and disable the infinite scroll
// if (data.length == 1000) {
// event.target.disabled = true;
// }
}, 500);
}
// notify a sale page to change segment
addPromo() {
this.notifyParent.emit("ad_promo");
}
updatePromo(data) {}
onDeletePromo(data) {}
setDesktopVoucherView( voucher){
this.desktopViewvoucher = voucher
console.log(voucher)
}
expand(event){
if(this.expand_detail){
this.expand_detail = false
}else{
this.expand_detail = true
}
this.detail_view_product =false
}
viewProduct(item){
this.detail_view_product = item
console.log(item)
}
}
<file_sep>import { TestBed } from "@angular/core/testing";
import { OrdersStateService } from "./orders-state.service";
describe("OrdersStateService", () => {
  let service: OrdersStateService;
  // Fresh TestBed per spec so each test gets a clean service instance.
  beforeEach(() => {
    TestBed.configureTestingModule({});
    service = TestBed.inject(OrdersStateService);
  });
  // Smoke test: the service can be constructed by Angular's injector.
  it("should be created", () => {
    expect(service).toBeTruthy();
  });
});
<file_sep>import { Component, OnInit } from '@angular/core';
import { ReportStateService } from 'src/app/core-modules/services/reports/report-state/report-state.service';
import { ReportFacadeService } from "src/app/core-modules/services/reports/report-facade/report-facade.service";
import {Data} from'./MOCK_DATA';
import {DatePipe} from '@angular/common';
import { ActionSheetController } from '@ionic/angular';
@Component({
  selector: 'app-units-sold',
  templateUrl: './units-sold.component.html',
  styleUrls: ['./units-sold.component.scss'],
})
// Charts units sold from the MOCK_DATA set, bucketed by day, month or year.
export class UnitsSoldComponent implements OnInit {
  lineChartDataInput: any; /*line chart data set*/
  lineChartLabelInput: any;
  pieChartDataInput: any;
  pieChartLabelsInput: any;
  constructor(public actionSheetController: ActionSheetController, public datepipe: DatePipe, private reportState: ReportStateService, private reportFacade: ReportFacadeService, ) {
    // Copy the mock dataset onto the component (makes `Data` template-visible).
    Object.assign(this, { Data })
  }
  public report_status;
  // Number of days/months/years to chart.
  public range = 7;
  public filtered;
  // End of the charted window (defaults to now; changed via startDateChange).
  public today = new Date();
  // Human-readable start of the window, for display.
  public startDateO;
  public lineOutPudata = [];
  public lineOutlabel = [];
  // 'Days' | 'Months' | 'Years'
  public currentFilter = 'Days';
  Data: any;
  /* data draft
  product id
  product name
  product category
  amount
  price
  discount
  category
  data-time*/
  ngOnInit() {
    // Build the initial chart with the default (Days) filter.
    this.dataDayFilter()
    /*for (let i = 0 ; i < Data.length; i++) {
      let saleDate = new Date(Data[i].date)
      if(this.daysDifference(saleDate,startDate)< this.range ){
        console.log(Data[i])
      }
    }*/
    // Track report availability from the shared report state.
    this.reportState.getReportStatus().subscribe(
      res => {
        this.report_status = res
      },
      err => {
        console.log(err)
      }
    )
  }
  /*external fuction from here*/
  // True when `today` is within `range` whole days after `startDate`.
  filterBydays(today, startDate) {
    if (this.daysDifference(today, startDate) < this.range) {
      return true
    }
  }
  // Whole days from endDate to startDate (positive when startDate is later),
  // computed on calendar dates in UTC so time-of-day is ignored.
  daysDifference(startDate, endDate) {
    return Math.floor((Date.UTC(startDate.getFullYear(), startDate.getMonth(), startDate.getDate()) - Date.UTC(endDate.getFullYear(), endDate.getMonth(), endDate.getDate())) / (1000 * 60 * 60 * 24));
  }
  // Days between the first day of startDate's month and the last day of
  // endDate's month.
  // NOTE(review): negative when startDate precedes endDate, and the caller
  // divides the result by 31 as an approximate month count — confirm the
  // sign and the /31 heuristic are intended.
  monthsDifference(startDate, endDate) {
    let startDateMonth = startDate.getUTCMonth()
    let startDateYear = startDate.getFullYear()
    let firstOfstartDate = new Date(startDateYear, startDateMonth, 1)
    let endDateMonth = endDate.getUTCMonth()
    let endDateYear = endDate.getFullYear()
    let lastOfEndDate = new Date(endDateYear, endDateMonth + 1, 0)
    return Math.floor((Date.UTC(firstOfstartDate.getFullYear(), firstOfstartDate.getMonth(), firstOfstartDate.getDate()) - Date.UTC(lastOfEndDate.getFullYear(), lastOfEndDate.getMonth(), lastOfEndDate.getDate())) / (1000 * 60 * 60 * 24));
  }
  /*this function filteres data by day*/
  // Rebuilds lineOutPudata/lineOutlabel for the current filter, range and
  // end date. Despite the name, handles all three filters.
  dataDayFilter() {
    this.lineOutPudata.splice(0, this.lineOutPudata.length) /*this clears the output data */
    this.lineOutlabel.splice(0, this.lineOutlabel.length) /*this clears the output data */
    let startDate = new Date();
    /*let day = today.getDate();
    let year = today.getUTCMonth();
    let month= today.getFullYear();*/
    startDate.setDate(startDate.getDate() - this.range)
    let startDateHolder = new Date()
    startDateHolder.setDate(this.today.getDate() - this.range)
    /*from*/
    if (this.currentFilter == 'Days') {
      this.startDateO = this.datepipe.transform(startDate, 'MMM d,y')
      /*console.log(start.toString())*/
      /*console.log(Data.filter(word => word.category == "Jewelery"))*/
      /*console.log(this.daysDifference(today,startDate));*/
      // Keep only sales inside the trailing `range`-day window.
      this.filtered = Data.filter(p => {
        let saleDate = new Date(p.date)
        if (this.daysDifference(saleDate, startDate) < this.range) {
          return true
        }
      })
      /** data set for pie chart*/
      var groupBy = function (xs, key) {
        return xs.reduce(function (rv, x) {
          (rv[x[key]] = rv[x[key]] || []).push(x);
          return rv;
        }, {});
      };
      let soldCategoryData = groupBy(this.filtered, "date")
      let soldCategoryDatakeys = Object.keys(soldCategoryData) /*this gets the keys from soldCategoryData*/
      console.log(soldCategoryData)
      /****** DAY DATA SET line Graph***/
      // One data point per day, oldest first: sum of `amount` for that day.
      for (let i = this.range; i > -1; i--) {
        let dateOfrange = new Date();
        dateOfrange.setDate(this.today.getDate() - i)
        /* this sets start date based on range*/
        let dateOfrangeLatest = this.datepipe.transform(dateOfrange, 'MMM d')
        let dayOfrange = dateOfrange.getDate()
        let monthOfrange = dateOfrange.getUTCMonth();
        let yearOfrange = dateOfrange.getFullYear();
        let second = this.filtered.filter(z => {
          let pDate = new Date(z.date)
          if (this.daysDifference(pDate, dateOfrange) == 0) {
            return true
          }
        })
        var total = 0;
        for (let i = 0; i < second.length; i++) {
          total = total + second[i].amount
        };
        this.lineOutPudata.push(total)
        this.lineOutlabel.push(dateOfrangeLatest)
      }
      /*assign to line data set*/
      let objLineData = {}
      objLineData["data"] = this.lineOutPudata
      objLineData["label"] = "Total Unit sold/day"
      this.lineChartDataInput = [];
      this.lineChartDataInput.push(objLineData)
      /*assign to line labels */
      this.lineChartLabelInput = this.lineOutlabel
    }
    /* filter by months */
    // NOTE(review): unlike the "Days" branch, the "Months" and "Years"
    // branches fill lineOutPudata/lineOutlabel but never copy them into
    // lineChartDataInput/lineChartLabelInput — confirm whether the chart is
    // expected to refresh for these filters.
    if (this.currentFilter == 'Months') {
      this.startDateO = this.datepipe.transform(startDate, 'MMM,y')
      this.filtered = Data.filter(p => {
        let saleDate = new Date(p.date)
        if ((this.monthsDifference(startDate, saleDate) / 31) < (this.range)) {
          /*hope that 31 works :-) */
          return true
        }
      })
      // One data point per calendar month, oldest first.
      for (let i = this.range; i > -1; i--) {
        let dateOfrange = new Date();
        dateOfrange.setMonth(this.today.getUTCMonth() - i)
        /* this sets start date based on range*/
        let dateOfrangeLatest = this.datepipe.transform(dateOfrange, 'MMM,y')
        console.log(dateOfrangeLatest)
        let dayOfrange = dateOfrange.getDate()
        let monthOfrange = dateOfrange.getUTCMonth();
        let yearOfrange = dateOfrange.getFullYear();
        let second = this.filtered.filter(z => {
          let pDate = new Date(z.date)
          let pDateMonth = pDate.getUTCMonth()
          let pDateYear = pDate.getFullYear()
          if (pDateMonth == monthOfrange && pDateYear == yearOfrange) {
            return true
          }
        })
        var total = 0;
        for (let i = 0; i < second.length; i++) {
          total = total + second[i].amount
        };
        this.lineOutPudata.push(total)
        this.lineOutlabel.push(dateOfrangeLatest)
      }
    }
    /*this is the annual filter*/
    if (this.currentFilter == 'Years') {
      this.startDateO = this.today.getFullYear() - this.range
      console.log(this.startDateO)
      // NOTE(review): strict < / > excludes both the current year and the
      // boundary year from the filter — confirm this is intended.
      this.filtered = Data.filter(p => {
        let saleDate = new Date(p.date)
        let saleDateYear = saleDate.getFullYear()
        let endDateYear = this.today.getFullYear()
        let startDateYear = endDateYear - this.range
        if (saleDateYear < endDateYear && saleDateYear > startDateYear) {
          return true
        }
      })
      // One data point per year, oldest first.
      for (let i = this.range; i > -1; i--) {
        let dateOfrange = new Date();
        dateOfrange.setFullYear(this.today.getFullYear() - i)
        /* this sets start date based on range*/
        let dateOfrangeLatest = this.datepipe.transform(dateOfrange, 'y')
        let yearOfrange = dateOfrange.getFullYear();
        let second = this.filtered.filter(z => {
          let pDate = new Date(z.date)
          let pDateYear = pDate.getFullYear()
          if (pDateYear == yearOfrange) {
            return true
          }
        })
        var total = 0;
        for (let i = 0; i < second.length; i++) {
          total = total + second[i].amount
        };
        this.lineOutPudata.push(total)
        this.lineOutlabel.push(dateOfrangeLatest)
      }
    }
  }
  /****filter Action *****/
  // Action sheet to pick the time-granularity filter.
  // NOTE(review): header reads 'Albums' — looks like leftover example text.
  async presentActionSheet() {
    const actionSheet = await this.actionSheetController.create({
      header: 'Albums',
      cssClass: 'my-custom-class',
      buttons: [{
        text: 'Days',
        handler: () => {
          this.currentFilter = "Days"
          this.dataDayFilter()
        }
      }, {
        text: 'Month',
        handler: () => {
          this.currentFilter = "Months"
          this.dataDayFilter()
        }
      }, {
        text: 'Annual',
        handler: () => {
          this.currentFilter = "Years"
          this.dataDayFilter()
        }
      },
      {
        text: 'Cancel',
        icon: 'close',
        role: 'cancel',
        handler: () => {
          console.log('Cancel clicked');
        }
      }]
    });
    await actionSheet.present();
  }
  // Re-chart when the user changes the range slider.
  rangeChange(event) {
    this.range = event.detail.value
    console.log(this.range)
    this.dataDayFilter()
  }
  // Re-chart when the user picks a new end date.
  startDateChange(event) {
    this.today = new Date(event.detail.value)
    this.dataDayFilter()
  }
  segmentChanged(event) {
  }
}
<file_sep>import { Component, OnDestroy, OnInit } from "@angular/core";
import { ProductFacadeService } from "src/app/core-modules/services/products/product-facade/product-facade.service";
import { CategoryFacadeService } from "src/app/core-modules/services/categories/category-facade/category-facade.service";
import { Router } from "@angular/router";
import { UserFacadeService } from "src/app/core-modules/services/profile/profile-facade/profile-facade.service";
import { browserRefresh } from "src/app/app.component";
import { MenuStateService } from "src/app/core-modules/services/menus/menu-state/menu-state.service";
import { ToastController } from "@ionic/angular";
import { HeaderStateService } from "src/app/core-modules/services/header/header-state/header-state.service";
import {BreakpointObserver, Breakpoints} from '@angular/cdk/layout';
import { from } from 'rxjs';
import { distinctUntilChanged } from 'rxjs/operators';
@Component({
  selector: "app-products",
  templateUrl: "./products.page.html",
  styleUrls: ["./products.page.scss"],
})
export class ProductsPage implements OnInit {
  products: boolean = false;
  // Active tab/segment ("inventory", "add_product", ...), synced with the
  // desktop side-menu state.
  public segment: any;
  category: boolean = true;
  updateProduct: any = {};
  products$;
  searchPlaceholder: string = "Products";
  // "Handset" | "Tablet" | "Web" from the CDK breakpoint observer.
  device_screen;
  filter;
  constructor(
    private productFacade: ProductFacadeService,
    private categoryFacade: CategoryFacadeService,
    private userFacade: UserFacadeService,
    private route: Router,
    public menuState: MenuStateService,
    public toastController: ToastController,
    private headerStateService: HeaderStateService,
    breakpointObserver: BreakpointObserver,
  ) {
    // Track the current device class for responsive template switching.
    breakpointObserver.observe([Breakpoints.Handset]).subscribe(result => {
      if (result.matches) {
        this.device_screen = "Handset";
      }
    });
    breakpointObserver.observe([Breakpoints.Tablet]).subscribe(result => {
      if (result.matches) {
        this.device_screen = "Tablet";
      }
    });
    breakpointObserver.observe([Breakpoints.Web]).subscribe(result => {
      if (result.matches) {
        this.device_screen = "Web";
      }
    });
    // Keep `segment` synced with the desktop side-menu selection.
    this.headerStateService
      .getDesktopSideMenuState()
      .pipe(distinctUntilChanged())
      .subscribe((res) => {
        this.segment = res;
        console.log(res);
      });
  }
  ngOnInit() {
    this.filter = "Default";
    //get current store
    this.checkSegment();
    this.getProducts();
    this.menuState.loggedIn();
    //load categories
    this.categoryFacade.loadCategories();
  }
  /**
   * Pick the initial segment.
   * Fix: the previous code assigned the Observable returned by
   * getDesktopSideMenuState() directly to `segment` (a string used by the
   * template) when desktop state was passed. The constructor subscription
   * already keeps `segment` current in that case, so only the "inventory"
   * fallback is needed here.
   */
  checkSegment() {
    if (!this.headerStateService.desktop_data_pass) {
      this.segment = "inventory";
    }
    console.log(this.segment);
  }
  doRefresh(event) {
    // NOTE(review): this checks event.returnValue and never calls
    // event.target.complete(); if this is wired to an ion-refresher the
    // spinner will not stop — confirm the caller.
    if (event.returnValue) {
      this.getProducts();
    }
  }
  /**
   * Load the store profile and subscribe to the store's products.
   * Resolves the store id from the cached current store, falling back to
   * the user profile when no store is cached yet.
   */
  getProducts() {
    //get current store
    let store = this.userFacade.getCurrentStore();
    //load store profile and subscribe store products
    if (Object.keys(store).length === 0) {
      this.userFacade.getUser$().subscribe(
        (res) => {
          this.userFacade.loadStoreProfile(res.store_id);
          this.productFacade.loadStoreProducts(res.store_id);
          this.products$ = this.productFacade
            .getCurrentStoreProducts()
            .pipe(distinctUntilChanged());
        },
        (err) => {
          console.log(err);
        }
      );
    } else {
      this.userFacade.loadStoreProfile(store._id);
      this.productFacade.loadStoreProducts(store._id);
      this.products$ = this.productFacade
        .getCurrentStoreProducts()
        .pipe(distinctUntilChanged());
    }
  }
  // Child component notified us to switch segment (e.g. after adding).
  fromAddProduct(data) {
    this.segment = data;
  }
  /**
   * Switch slides; the "add_product" slide is gated behind the store's
   * license / free-trial limits and requires a return policy before the
   * very first product.
   */
  addProductSlides(data) {
    let products;
    if (data === "add_product") {
      //check lisence
      this.productFacade
        .getStoreProducts$()
        .pipe(distinctUntilChanged())
        .subscribe((res) => {
          products = res;
          let product_length = products.length;
          let store = this.userFacade.getCurrentStore();
          let free_trial = store.free_trial;
          let trial_end_date;
          let trial_number_products;
          if (free_trial) {
            trial_end_date = free_trial.end_date;
            trial_number_products = free_trial.allowed_products;
          }
          let return_policy_length = store.return_policy.length;
          let lisences = store.lisence;
          let today_ms = Date.now();
          let one_day_ms = 86400000;
          console.log(product_length, return_policy_length);
          //check for lisence
          if (product_length == 0 && return_policy_length == 0) {
            // The very first product requires a return policy to exist.
            this.presentToast(
              "set up your return policy first!"
            );
            /*setTimeout(() => {
              this.route.navigate(["/store/return-policy"]);
            }, 1000)*/
          } else {
            let allow_access = false;
            if (lisences.length > 0) {
              // Any unexpired license with product headroom unlocks the slide.
              lisences.forEach(lisence => {
                let lisence_end_date = lisence.payed_amount.end_date;
                let time_remaining = (lisence_end_date - today_ms) / one_day_ms;
                if (product_length < lisence.number_of_products && time_remaining > 0) {
                  allow_access = true;
                }
              });
            } else {
              // No license: fall back to the free-trial window and limit.
              let trial_time_remaining = (trial_end_date - today_ms) / one_day_ms;
              if (trial_time_remaining > 0 && product_length < trial_number_products) {
                allow_access = true;
              }
            }
            if (allow_access) {
              this.segment = data;
            } else {
              this.presentToast(
                "Buy license or try our 15 days trial to add more products"
              );
            }
          }
        });
    } else {
      this.segment = data;
    }
  }
  searchItems(ev) {
    // set val to the value of the searchbar
    let val = ev.target.value;
    // if the value is an empty string don't filter the items
    if (val && val.trim() != "") {
      this.productFacade.loadSearchedStoreProducts(val);
    } else {
      // Reset items back to all of the items
      this.productFacade.loadVendorProducts();
    }
  }
  async presentToast(msg) {
    const toast = await this.toastController.create({
      message: msg,
      duration: 2500,
    });
    toast.present();
  }
  //desktop
  // Segment bar changed on desktop: publish the selection to the shared state.
  producChanged(event) {
    let segment = event.detail.value;
    console.log(segment);
    this.headerStateService.setDesktopSideMenuState(segment);
  }
  // True when the given filter name is the active one.
  checkFilter(data) {
    console.log(data);
    if (this.filter == data) {
      return true;
    } else {
      return false;
    }
  }
}
<file_sep>import { Injectable } from "@angular/core";
import { Md5 } from "ts-md5/dist/md5";
import { ProfileApiService } from "src/app/core-modules/services/shared/profile/profile-api/profile-api.service";
import { PaymentApiService } from "src/app/core-modules/services/shared/payments/payment-api/payment-api.service";
import { EnvService } from "src/app/env.service";
@Injectable({
  providedIn: "root",
})
export class PayfastFacadeService {
  md5 = new Md5();
  constructor(
    private paymentApi: PaymentApiService,
    private userApi: ProfileApiService,
    private env: EnvService
  ) {}
  /**
   * Persist a temporary order (the localStorage cart, or a license purchase)
   * before redirecting the user to PayFast.
   * NOTE(review): the `return` inside the subscribe callback is discarded —
   * callers of temporaryOrder() receive undefined; confirm whether this was
   * meant to return a Promise/Observable of m_payment_id.
   */
  temporaryOrder(order_info, lisence) {
    let cart;
    let real;
    //check if its license or product
    if (lisence) {
      cart = lisence;
      real = true;
    } else {
      cart = JSON.parse(localStorage.getItem("cart"));
    }
    let temp = {
      data: order_info,
      cart: cart,
      lisence: real,
    };
    this.paymentApi.tempOrder(temp).subscribe(
      (res) => {
        localStorage.removeItem("cart");
        return res.order[0].m_payment_id;
      },
      (err) => {
        console.log(err);
      }
    );
  }
  /**
   * Assemble the PayFast checkout form fields and MD5 signature.
   * Fix: the signature string contained "¬ify_url=" — mojibake of
   * "&notify_url=" (the "&not" sequence had been HTML-entity-decoded to
   * "¬") — which corrupted both the query string and the signature.
   * NOTE(review): the form stores the address as `buyer_email` while the
   * signature uses `email_address` — verify against PayFast's required
   * field names.
   * NOTE(review): the returned object is populated asynchronously inside
   * subscribe — callers receive the (initially empty) object by reference.
   */
  getPayfastForm() {
    // get auth and process paymnet
    let payfastForm: any = {};
    let hash;
    this.userApi.getUser().subscribe(
      (res) => {
        // Merchant details
        payfastForm.merchant_id = "10016542";
        payfastForm.merchant_key = "w7wn35bsap1pf";
        payfastForm.return_url = `${this.env.apiUrl}/buy/payment/checkout/payfast/success`;
        payfastForm.cancel_url = `${this.env.apiUrl}/buy/payment/checkout/payfast/cancel`;
        payfastForm.notify_url = `${this.env.apiUrl}/api/payfast/notify_url`;
        // Buyer Details
        payfastForm.buyer_email = res.user.email;
        // Transaction Details
        payfastForm.m_payment_id = this.randomize();
        payfastForm.amount = parseFloat(localStorage.getItem("total")).toFixed(
          2
        );
        payfastForm.item_name = this.randomize();
        payfastForm.item_description = this.randomize();
        payfastForm.email_confirmation = "1";
        payfastForm.confirmation_address = res.user.email;
        // Set Payment Method
        payfastForm.payment_method = "eft";
        //generate a signature
        hash = `merchant_id=${encodeURIComponent(
          payfastForm.merchant_id
        )}&merchant_key=${encodeURIComponent(
          payfastForm.merchant_key
        )}&return_url=${encodeURIComponent(
          payfastForm.return_url
        )}&cancel_url=${encodeURIComponent(
          payfastForm.cancel_url
        )}&notify_url=${encodeURIComponent(
          payfastForm.notify_url
        )}&email_address=${encodeURIComponent(
          payfastForm.buyer_email
        )}&m_payment_id=${encodeURIComponent(
          payfastForm.m_payment_id
        )}&amount=${encodeURIComponent(
          payfastForm.amount
        )}&item_name=${encodeURI(
          payfastForm.item_name
        )}&item_description=${encodeURI(
          payfastForm.item_description
        )}&email_confirmation=${encodeURIComponent(
          payfastForm.email_confirmation
        )}&confirmation_address=${encodeURIComponent(
          payfastForm.confirmation_address
        )}&payment_method=${encodeURIComponent(payfastForm.payment_method)}`;
        // MD5 encode
        payfastForm.signature = this.md5.appendStr(hash).end();
      },
      (err) => {
        console.log(err);
      }
    );
    return payfastForm;
  }
  // randomize m_payment_id
  // Random 10-character alphanumeric string.
  randomize() {
    // Randomize chars
    let length = 10;
    let chars =
      "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
    var result = "";
    for (var i = length; i > 0; --i)
      result += chars[Math.floor(Math.random() * chars.length)];
    return result;
  }
}
<file_sep>module.exports = {
// smtpTransport Email
server:{
host: 'smtp.ethereal.email',
port: 587,
auth: {
user: '<EMAIL>',
pass: '<PASSWORD>'
}
}
}
<file_sep>import { TestBed } from "@angular/core/testing";
import { ProductCacheService } from "./product-cache.service";
describe("ProductCacheService", () => {
  let service: ProductCacheService;
  // Fresh TestBed per spec so each test gets a clean service instance.
  beforeEach(() => {
    TestBed.configureTestingModule({});
    service = TestBed.inject(ProductCacheService);
  });
  // Smoke test: the service can be constructed by Angular's injector.
  it("should be created", () => {
    expect(service).toBeTruthy();
  });
});
<file_sep>import { Component, OnInit, ViewChild } from "@angular/core";
import { HeaderStateService } from "src/app/core-modules/services/header/header-state/header-state.service";
import { IonSlides } from "@ionic/angular";
import { ToastController } from "@ionic/angular";
import { Location } from "@angular/common";
import { ProfileApiService } from "src/app/core-modules/services/profile/profile-api/profile-api.service";
import { OrderFacadeService } from "src/app/core-modules/services/orders/order-facade/order-facade.service";
import { UserFacadeService } from "src/app/core-modules/services/profile/profile-facade/profile-facade.service";
@Component({
  selector: "app-returns",
  templateUrl: "./returns.page.html",
  styleUrls: ["./returns.page.scss"],
})
export class ReturnsPage implements OnInit {
  @ViewChild("check_slides", { static: true }) slides: IonSlides;
  // Product being returned (passed via header state).
  public product;
  // Order the product belongs to (passed via header state).
  public order;
  // The store's return policy, loaded in ngOnInit.
  public returnPolicy;
  public policy_comment = "";
  // Reasons the user has ticked.
  public selectedReturnReasons: any = [];
  public return_reason = [
    "Damaged Goods",
    "Wrong Order Delivered",
    "SCAM",
    "size variation ",
  ];
  store;
  constructor(
    private headerState: HeaderStateService,
    public toastController: ToastController,
    private _location: Location,
    private userApi: ProfileApiService,
    private orderFacade: OrderFacadeService,
    private userFacade: UserFacadeService
  ) {}
  ngOnInit() {
    this.product = this.headerState.data_passed.product;
    this.order = this.headerState.data_passed.order;
    this.store = this.userFacade.getCurrentStore();
    console.log(this.store);
    // Load the store profile to get its return policy.
    this.userApi.storeProfile({ _id: this.store._id }).subscribe(
      (res_store) => {
        //loadStore
        let store = res_store.store;
        this.returnPolicy = store.return_policy;
      },
      (err) => {
        console.log(err);
      }
    );
  }
  // Toggle a reason in/out of the selected list.
  setReturnReason(reason) {
    if (this.selectedReturnReasons.includes(reason)) {
      let i = this.selectedReturnReasons.indexOf(reason);
      this.selectedReturnReasons.splice(i, 1);
    } else {
      console.log(
        this.selectedReturnReasons,
        "len",
        this.selectedReturnReasons.length
      );
      this.selectedReturnReasons.push(reason);
    }
  }
  // CSS hook for the template: "selected" when the reason is ticked.
  isSelected(reason) {
    if (this.selectedReturnReasons.includes(reason)) {
      return "selected";
    }
  }
  next(slides) {
    slides.slideNext();
    this.presentToast("slide to go back");
  }
  async presentToast(data) {
    const toast = await this.toastController.create({
      message: data,
      duration: 3000,
      position: "bottom",
    });
    toast.present();
  }
  /**
   * Attach the chosen return reasons to the matched order item and submit
   * the return request.
   */
  submit() {
    const elementsIndex = this.order.items.findIndex((item) => {
      return item._id == this.product._id;
    });
    let newArray = [...this.order.items];
    let return_reason = {
      return_reasons: this.selectedReturnReasons,
      return_comment: this.policy_comment,
    };
    //modify order product
    newArray[elementsIndex] = {
      ...newArray[elementsIndex],
      return: return_reason,
    };
    // Fix: previously sent newArray[0] regardless of which item matched —
    // wrong product for multi-item orders. Use the matched index.
    let new_product = newArray[elementsIndex];
    let data = {
      return: true,
      return_reason: return_reason,
      order_id: this.order._id,
      product: new_product,
    };
    console.log(data);
    this.orderFacade.updateStoreOrder({ data: data });
  }
  backClicked() {
    this._location.back();
  }
}
<file_sep>import { Injectable } from "@angular/core";
import { HttpClient } from "@angular/common/http";
import { EnvService } from "src/app/env.service";
@Injectable({
  providedIn: "root",
})
// Thin HTTP wrapper around the payfast order endpoints.
export class OrdersApiService {
  // Endpoint URLs.
  // NOTE(review): these field initializers read `this.env` (a constructor
  // parameter property); this relies on TypeScript's legacy class-field emit
  // ordering — confirm it still holds if `useDefineForClassFields` is
  // enabled.
  _urlUpdateOrder = `${this.env.apiUrl}/api/payfast/update_order_status`;
  _urlUserOrders = `${this.env.apiUrl}/api/payfast/get_user_orders`;
  _urlverifyOrderDelivery = `${this.env.apiUrl}/api/payfast/update_order_delivery`;
  constructor(private _http: HttpClient, private env: EnvService) {}
  // POST a status change for an order.
  updateOrderStatus(data: any) {
    return this._http.post<any>(this._urlUpdateOrder, data);
  }
  // GET all orders for the current user.
  getUserOrders() {
    return this._http.get<any>(this._urlUserOrders);
  }
  // POST a delivery confirmation for an order.
  verifyOrderDelivery(data: any) {
    return this._http.post<any>(this._urlverifyOrderDelivery, data);
  }
}
<file_sep>import { Injectable } from "@angular/core";
import { BehaviorSubject } from "rxjs";
@Injectable({
  providedIn: "root",
})
export class AuthStateService {
  // Feature list shared by every pre-made license tier. In the original
  // hand-written array this exact list was copy-pasted into all eight
  // tiers; it is factored out here (DRY) and re-spread per tier so each
  // license still gets its own array instance.
  private static readonly LICENSE_FEATURES = [
    "24/7 SUPPORT",
    "SALES CHANNELS",
    "PROMOTIONS (Market Places, Google Ads, Social Media",
    "BILL OF MATERIAL CONFIGURATOR",
    "DELIVERY COMFIRMATION",
    "ORDER FULFILMENT",
    "SALES CAMPAIGN",
    "DISCOUNT VOUCHER",
    "CHECKOUT PAYMENT SYSTEM",
    "SSL CERTIFICATE",
    "ABANDONED CART RECOVERY*",
    "PROFESSIONAL REPORTS",
    "THIRD-PARTY CALCULATED SHIPPING RATES",
    "Show calculated rates with your own account or third-party couriers",
  ];
  // Transaction fees shared by every tier (percentages).
  private static readonly TRANSACTION_PERCENTAGE = {
    cartalist: 1.2,
    payment_gateway: 5,
  };
  // Build one license entry; only the per-tier values differ.
  private static makeLicense(
    license_name: string,
    monthly_price: number,
    annual_price: number,
    annual_advance_payment: number,
    number_of_stores: string,
    number_of_products: number | string,
    number_of_users: number | string
  ) {
    return {
      license_name,
      monthly_price,
      annual_price,
      annual_advance_payment,
      number_of_stores,
      number_of_products,
      number_of_users,
      features: [...AuthStateService.LICENSE_FEATURES],
      transaction_percentage: { ...AuthStateService.TRANSACTION_PERCENTAGE },
    };
  }
  // Pre-made license tiers (data identical to the original literal array).
  private licenses$ = new BehaviorSubject<any>([
    AuthStateService.makeLicense("Novice-Vendor-Lite", 180.0, 2160.0, 1944.0, "Single store", 5, 1),
    AuthStateService.makeLicense("Novice-Vendor-Premium", 270.0, 3564.0, 3207.6, "Single store", 25, 2),
    AuthStateService.makeLicense("Novice-Vendor-Alist", 360.0, 4752.0, 4276.8, "Single store", 50, 5),
    AuthStateService.makeLicense("Start-Up", 1800.0, 23760.0, 21384.0, "Two stores", 75, 20),
    AuthStateService.makeLicense("Start-Up-Premium", 3600.0, 47520.0, 42768.0, "Two stores", 500, 40),
    AuthStateService.makeLicense("Start-Up-Advance", 9000.0, 118800.0, 106920.0, "Multi stores", 2500, 100),
    AuthStateService.makeLicense("Start-Up-Alist", 18000.0, 237600.0, 213840.0, "Multi stores", 5000, 200),
    // NOTE(review): this tier repeats the name "Start-Up-Alist" — likely a
    // copy-paste slip in the original data; kept as-is to preserve behavior.
    AuthStateService.makeLicense("Start-Up-Alist", 36000.0, 475200.0, 427680.0, "Multi stores", "unlimited", "unlimited"),
  ]);
  constructor() {}
  // Read-only stream of the pre-made license tiers.
  getPremadeLicenses() {
    return this.licenses$.asObservable();
  }
}
<file_sep>import { Component, OnInit, Output, EventEmitter } from '@angular/core';
import { DataService } from '../data.service';
@Component({
  selector: 'app-widgetsnav',
  templateUrl: './widgetsnav.component.html',
  styleUrls: ['./widgetsnav.component.scss'],
})
export class WidgetsnavComponent implements OnInit {
  // Emits the name of the widget group the user picked ("users" | "sales").
  @Output() selected = new EventEmitter();
  name: string;
  // Controls visibility of the explanation panel in the template.
  showExpB: boolean = true;
  message: string;

  constructor(private data: DataService) {}

  ngOnInit() {}

  // Tell the parent the "users" widgets were selected.
  user() {
    this.selected.emit("users");
  }

  // Tell the parent the "sales" widgets were selected.
  sales() {
    this.selected.emit("sales");
  }

  // Hide the explanation panel.
  showExp() {
    this.showExpB = false;
  }

  // Bring the explanation panel back.
  showExpAgain() {
    this.showExpB = true;
  }
}
<file_sep>const express = require("express");
const jwt = require("jsonwebtoken");
const keys = require("../../../config/users/keys");
const router = express.Router();
// Load input validation
const validateCategoryInput = require("../../../validation/product/categoryValidation");
// Load Category model
const Category = require("../../../models/category/Category");
// Register Category
// POST /createCategory — create a category unless one with the same name
// already exists.
router.post("/createCategory", (req, res) => {
  const cat = req.body;
  // Fix: the duplicate check previously queried { name: ... }, but the
  // schema stores the field as `categoryName`, so it never matched and
  // duplicates were silently allowed.
  Category.findOne({ categoryName: cat.categoryName })
    .then((category) => {
      if (category) {
        return res.json({ category: "Category exists" });
      }
      const newCategory = new Category({
        categoryName: cat.categoryName,
        description: cat.categoryDescription,
        units: cat.categoryUnits,
        unitOfMeasure: cat.categoryUnitsMeasure,
        properties: cat.properties,
      });
      return newCategory.save().then((category) => {
        return res.json({ category: category });
      });
    })
    .catch((err) => {
      // Fix: errors were only logged, leaving the request hanging with no
      // response.
      console.log(err);
      return res.status(500).json({ error: "Failed to create category" });
    });
});
// View Category
// GET /viewCategory — list all categories.
router.get("/viewCategory", (req, res) => {
  Category.find()
    .then((category) => {
      return res.json({ category: category });
    })
    .catch((err) => {
      // Fix: errors were only logged, leaving the request hanging.
      console.log(err);
      return res.status(500).json({ error: "Failed to load categories" });
    });
});
// Update Category
// POST /updateCategory — overwrite the editable fields of one category,
// identified by body.category_id.
router.post("/updateCategory", (req, res) => {
  Category.updateOne(
    { _id: req.body.category_id },
    {
      $set: {
        categoryName: req.body.categoryName,
        units: req.body.categoryUnits,
        unitOfMeasure: req.body.categoryUnitsMeasure,
        description: req.body.categoryDescription,
        properties: req.body.properties,
      },
    }
  )
    .then((category) => {
      return res.json({ category: category });
    })
    .catch((err) => {
      // Fix: errors were only logged, leaving the request hanging.
      console.log(err);
      return res.status(500).json({ error: "Failed to update category" });
    });
});
// Delete Category
// GET /deleteCategory?id=... — delete one category by id.
// Fix: the handler body was entirely commented out, so every request hung
// with no response. This implements the delete the commented-out code
// intended (reading the id from the query string, since GET requests do
// not reliably carry a body), and always responds.
router.get("/deleteCategory", (req, res) => {
  const id = req.query.id || req.body.id;
  if (!id) {
    return res.status(400).json({ error: "id is required" });
  }
  Category.deleteOne({ _id: id })
    .then((category) => {
      return res.json({ category: category });
    })
    .catch((err) => {
      console.log(err);
      return res.status(500).json({ error: "Failed to delete category" });
    });
});
module.exports = router;
<file_sep>import { Injectable } from "@angular/core";
import { BehaviorSubject, Observable } from "rxjs";
import { ThrowStmt } from "@angular/compiler";
// Client-side shopping-cart state, persisted to localStorage under the key
// "cart". Cart entries are product objects carrying at least: _id,
// productPrice, amount, and optionally promo_kind ("itemDiscount" | "volume" |
// "buy1get1free" | "combo") plus thresh (promo bundle size) — schema inferred
// from usage below; TODO confirm against the product model.
// cartItemCount mirrors the summed `amount` fields for header badges etc.
@Injectable({
  providedIn: "root",
})
export class CartService {
  // In-memory cart; re-hydrated from localStorage in getCartItemCount().
  private cart = [];
  // Running total of item amounts, exposed as a BehaviorSubject.
  private cartItemCount = new BehaviorSubject(0);
  constructor() {}
  // Reset the in-memory cart and the badge counter.
  // NOTE(review): does NOT clear the "cart" localStorage key, so a later
  // getCartItemCount() re-loads the old cart — confirm this is intended.
  clearCart() {
    this.cart = [];
    this.cartItemCount.next(0);
  }
  // Return the live cart array (callers receive a mutable reference).
  getCart() {
    return this.cart;
  }
  // Re-hydrate the cart from localStorage (if present), recompute the total
  // amount, publish it, and return the counter subject.
  getCartItemCount() {
    if (!!localStorage.getItem("cart")) {
      this.cart = JSON.parse(localStorage.getItem("cart"));
      let cartAmount = 0;
      for (let p of this.cart) {
        cartAmount += p.amount;
      }
      //let cart_total = this.cartItemCount.value + cartAmount;
      this.cartItemCount.next(cartAmount);
      return this.cartItemCount;
    } else {
      return this.cartItemCount;
    }
  }
  // Add a product to the cart. If an entry with the same _id and price already
  // exists, bump its amount according to its promo kind; otherwise push the
  // product as a new entry. Persists the cart to localStorage afterwards.
  addProduct(product) {
    console.log(product);
    let added = false;
    //if the product already exist in the cart
    for (let p of this.cart) {
      // if product has no promo
      if (
        p._id === product._id &&
        p.productPrice === product.productPrice &&
        !p.promo_kind
      ) {
        p.amount += 1;
        this.cartItemCount.next(this.cartItemCount.value + 1);
        added = true;
        break;
      }
      //Item discount
      if (
        p._id === product._id &&
        p.productPrice === product.productPrice &&
        p.promo_kind == "itemDiscount"
      ) {
        p.amount += 1;
        this.cartItemCount.next(this.cartItemCount.value + 1);
        added = true;
        break;
      }
      //volume discount: amounts move in bundles of `thresh`
      if (
        p._id === product._id &&
        p.productPrice === product.productPrice &&
        p.promo_kind == "volume"
      ) {
        p.amount += product.thresh;
        this.cartItemCount.next(this.cartItemCount.value + product.thresh);
        added = true;
        break;
      }
      //buy1get1 discount
      if (
        p._id === product._id &&
        p.productPrice === product.productPrice &&
        p.promo_kind == "buy1get1free"
      ) {
        console.log("buy1get1free");
        p.amount += 1;
        this.cartItemCount.next(this.cartItemCount.value + 1);
        added = true;
        break;
      }
      //combo discount
      if (
        p._id === product._id &&
        p.productPrice === product.productPrice &&
        p.promo_kind == "combo"
      ) {
        console.log("combo");
        p.amount += 1;
        this.cartItemCount.next(this.cartItemCount.value + 1);
        added = true;
        break;
      }
    }
    // if first time product: push as-is and count its initial amount
    if (!added) {
      this.cart.push(product);
      //Update cartItemCount and cart
      this.cartItemCount.next(this.cartItemCount.value + product.amount);
    }
    localStorage.setItem("cart", JSON.stringify(this.cart));
  }
  // Increment the amount of an existing cart entry. Step size depends on the
  // promo kind: +1 normally, +thresh for volume, +2 for buy1get1free (paid
  // item), +thresh for the free (price 0) buy1get1free companion entry.
  increaseProduct(product) {
    if (product.promo_kind) {
      for (let p of this.cart) {
        //if item Discount
        if (
          p._id === product._id &&
          p.productPrice === product.productPrice &&
          product.promo_kind == "itemDiscount"
        ) {
          p.amount += 1;
          this.cartItemCount.next(this.cartItemCount.value + 1);
          break;
        }
        // if volume Discount
        if (
          p._id === product._id &&
          p.productPrice === product.productPrice &&
          product.promo_kind == "volume"
        ) {
          p.amount += product.thresh;
          this.cartItemCount.next(this.cartItemCount.value + product.thresh);
          break;
        }
        //buy1get1free Discount
        if (
          p._id === product._id &&
          p.productPrice === product.productPrice &&
          product.promo_kind == "buy1get1free"
        ) {
          p.amount += 2;
          this.cartItemCount.next(this.cartItemCount.value + 2);
          break;
        }
        //Combo Discount
        // NOTE(review): this branch compares _id with == while the others use
        // === — confirm whether the loose comparison is deliberate.
        if (
          p._id == product._id &&
          p.productPrice === product.productPrice &&
          product.promo_kind == "combo"
        ) {
          p.amount += 1;
          this.cartItemCount.next(this.cartItemCount.value + 1);
          break;
        }
      }
      for (let p of this.cart) {
        // if buy1get1 & product is free (the price-0 companion entry)
        if (
          p._id === product._id &&
          p.productPrice === 0 &&
          product.promo_kind == "buy1get1free"
        ) {
          p.amount += product.thresh;
          this.cartItemCount.next(this.cartItemCount.value + product.thresh);
          break;
        }
      }
    } else {
      // if product has no promo
      for (let p of this.cart) {
        if (
          p._id === product._id &&
          p.productPrice === product.productPrice &&
          !p.promo_kind
        ) {
          p.amount += 1;
          this.cartItemCount.next(this.cartItemCount.value + 1);
          break;
        }
      }
    }
    localStorage.setItem("cart", JSON.stringify(this.cart));
  }
  // Decrement the amount of a cart entry (mirror of increaseProduct); removes
  // the entry when its amount reaches exactly 0.
  // NOTE(review): the `== 0` checks never fire if an amount steps below zero
  // (e.g. buy1get1free decrements by 2 from an odd amount) — confirm whether
  // `<= 0` was intended.
  decreaseProduct(product) {
    if (product.promo_kind) {
      for (let [index, p] of this.cart.entries()) {
        //if item Discount
        if (
          p._id === product._id &&
          p.productPrice === product.productPrice &&
          product.promo_kind == "itemDiscount"
        ) {
          p.amount -= 1;
          if (p.amount == 0) {
            this.cart.splice(index, 1);
            // NOTE(review): p was just removed from the cart; resetting its
            // amount only affects the detached object — confirm intent.
            p.amount = 1;
          }
          this.cartItemCount.next(this.cartItemCount.value - 1);
          break;
        }
        // if volume Discount
        if (
          p._id === product._id &&
          p.productPrice === product.productPrice &&
          product.promo_kind == "volume"
        ) {
          p.amount -= product.thresh;
          if (p.amount == 0) {
            this.cart.splice(index, 1);
            p.amount = 1;
          }
          this.cartItemCount.next(this.cartItemCount.value - product.thresh);
          break;
        }
        // if buy1get1free
        if (
          p._id === product._id &&
          p.productPrice === product.productPrice &&
          product.promo_kind == "buy1get1free"
        ) {
          p.amount -= 2;
          if (p.amount == 0) {
            this.cart.splice(index, 1);
            // p.amount = 1;
          }
          this.cartItemCount.next(this.cartItemCount.value - 2);
          break;
        }
        //Combo Discount
        if (
          p._id === product._id &&
          p.productPrice === product.productPrice &&
          product.promo_kind == "combo"
        ) {
          p.amount -= 1;
          if (p.amount == 0) {
            this.cart.splice(index, 1);
            p.amount = 1;
          }
          this.cartItemCount.next(this.cartItemCount.value - 1);
          break;
        }
      }
      for (let [index, p] of this.cart.entries()) {
        // if buy1get1 & product is free (the price-0 companion entry)
        if (
          p._id === product._id &&
          p.productPrice === 0 &&
          product.promo_kind == "buy1get1free"
        ) {
          p.amount -= 1;
          if (p.amount == 0) {
            this.cart.splice(index, 1);
            p.amount = 1;
          }
          this.cartItemCount.next(this.cartItemCount.value - 1);
          break;
        }
      }
    } else {
      // if product has no promo
      for (let [index, p] of this.cart.entries()) {
        if (
          p._id === product._id &&
          p.productPrice === product.productPrice &&
          !p.promo_kind
        ) {
          p.amount -= 1;
          if (p.amount == 0) {
            this.cart.splice(index, 1);
            p.amount = 1;
          }
          this.cartItemCount.next(this.cartItemCount.value - 1);
          break;
        }
      }
    }
    localStorage.setItem("cart", JSON.stringify(this.cart));
  }
  // Remove a cart entry entirely (and, for buy1get1free/combo promos, its
  // companion entries), subtracting the removed amounts from the counter.
  removeProduct(product) {
    for (let [index, p] of this.cart.entries()) {
      if (p._id === product._id && p.productPrice === product.productPrice) {
        this.cartItemCount.next(this.cartItemCount.value - p.amount);
        this.cart.splice(index, 1);
        p.amount = 1;
      }
    }
    for (let [index, p] of this.cart.entries()) {
      // if buy1get1free: also drop the free (price-0) companion entry
      if (
        p._id === product._id &&
        p.productPrice === 0 &&
        product.promo_kind == "buy1get1free"
      ) {
        this.cartItemCount.next(this.cartItemCount.value - p.amount);
        this.cart.splice(index, 1);
        p.amount = 1;
      }
      // if combo
      if (
        p._id === product._id &&
        p.productPrice === product.productPrice &&
        product.promo_kind == "combo"
      ) {
        this.cartItemCount.next(this.cartItemCount.value - p.amount);
        this.cart.splice(index, 1);
        p.amount = 1;
      }
    }
    localStorage.setItem("cart", JSON.stringify(this.cart));
  }
}
<file_sep>const nodemailer = require("nodemailer");
const jwt = require("jsonwebtoken");
const keys = require("../../../config/users/keys");
const smtp = require("../../config/email/vars");
const frontHost = require("../../config/front/server");
// Send a verification email to a newly registered user, then respond with a
// signed JWT. Parameters: password (plaintext, embedded in the email body —
// see security note below), user (must expose email, id, verified.token),
// req (unused here), res (Express response used for all replies).
module.exports = function emailAuth(password, user, req, res) {
  // Send verification Email
  const smtpTransport = nodemailer.createTransport({
    host: smtp.server.host,
    port: smtp.server.port,
    auth: {
      user: smtp.server.auth.user,
      pass: smtp.server.auth.pass,
    },
  });
  // Prepare Email: one link doubles as both verify and change-password URL.
  let token, link, mailOptions;
  token = user.verified.token;
  link = frontHost.server + "user/verify/?token=" + token;
  // SECURITY(review): the user's plaintext password is embedded in the email
  // body ("current pass: ..."). Email is not a secure channel — confirm this
  // is acceptable, or drop the password from the message.
  mailOptions = {
    to: user.email,
    subject: "Please confirm your Email account",
    html:
      "Hello,<br> Please Click on the link to verify your email.<br><a href=" +
      link +
      ">Click here to verify</a>" +
      "<br> Click on the link to change your password<br><a href=" +
      link +
      ">Click here to change password</a>" +
      "current pass: " +
      password,
  };
  // Send Email; on failure the error is returned to the client as-is.
  smtpTransport.sendMail(mailOptions, function (error, response) {
    if (error) {
      res.json({
        message: "Oops something went wrong while sending",
        err: error,
      });
    } else {
      // Email accepted by the SMTP server: issue the auth token.
      const payload = {
        id: user.id,
        email: user.email,
      };
      // Sign token
      jwt.sign(
        payload,
        keys.secretOrKey,
        {
          expiresIn: 31556926, // 1 year
        },
        (err, token) => {
          // NOTE(review): a signing error (err) is not handled here; the
          // response would carry token === undefined — confirm.
          res.json({
            message: "User registered, Please check your email to verify",
            user: user,
            token: token,
          });
        }
      );
    }
  });
};
<file_sep>module.exports = {
  // SECURITY(review): live database credentials, OAuth client secrets, and a
  // super-admin password are hard-coded here and committed to source control.
  // Move them to environment variables and add this file to .gitignore, then
  // rotate every secret below. (Values appear partially redacted in this
  // copy — confirm against the deployed config.)
  mongoURI: "mongodb+srv://peekaymaja:<EMAIL>/test?retryWrites=true&w=majority",
  // Secret used to sign/verify JWTs (see config/users/passport).
  secretOrKey: "<KEY>",
  //GoogleStrategy OAuth credentials
  google:{
    clientID: '349308001429-k87d0aah3lue7m80m8mhihi7k52gdtke.apps.googleusercontent.com',
    clientSecret: '<KEY>'
  },
  // Key used by cookie-session to sign the session cookie.
  session:{
    cookieKey: 'sdksdnssd125521ssfdsfsd sdfs'
  },
  // Facebook OAuth credentials
  facebook: {
    clientID: '662779470798128',
    clientSecret: 'fba09ace3de6402606ee3cedb10e108c'
  },
  // Hard-coded super-admin bootstrap account.
  super: {
    admin: true,
    id: "awserdfdgtr<PASSWORD>1254",
    email: "<EMAIL>",
    password: "<PASSWORD>"
  }
};
<file_sep>import { Component, OnInit, OnChanges } from '@angular/core';
import { PayfastFacadeService } from "src/app/core-modules/services/payments/payfast-facade/payfast-facade.service";
import { ProfileApiService } from 'src/app/core-modules/services/profile/profile-api/profile-api.service';
import { Md5 } from "ts-md5/dist/md5";
// PayFast checkout page: builds the merchant form (including its MD5
// signature) from the license selection passed via router state, and submits
// a temporary order when the user confirms.
@Component({
  selector: 'app-payment',
  templateUrl: './payment.page.html',
  styleUrls: ['./payment.page.scss'],
})
export class PaymentPage implements OnInit, OnChanges {
  md5 = new Md5();
  user_email
  payfastForm;
  selected_license
  billing_cycle_selected
  constructor(
    private payFacade: PayfastFacadeService,
    private userApi: ProfileApiService,
  ) { }
  ngOnChanges(){
    console.log(this.payfastForm)
  }
  ngOnInit() {
    // Checkout context is passed through router navigation state.
    let state = window.history.state
    this.user_email = state.user_email
    this.selected_license = {...state.selected_license}
    this.billing_cycle_selected = state.billing_cycle_selected
    this.payFast()
    console.log(this.payfastForm)
  }
  //Send form to payfast
  onSub(info) {
    localStorage.removeItem("license_name")
    let license = this.selected_license;
    this.payFacade.temporaryOrder(info, license);
  }
  // Assemble the PayFast form fields and compute the MD5 signature over the
  // URL-encoded parameter string (field order must match PayFast's spec).
  payFast() {
    if(this.selected_license){
      let payfastForm: any = {};
      // Merchant details
      // SECURITY(review): merchant_id/merchant_key are hard-coded in client
      // code — move to server-side config.
      payfastForm.merchant_id = "10016542";
      payfastForm.merchant_key = "w7wn35bsap1pf";
      payfastForm.return_url =
        "https://cartalist.herokuapp.com/buy/payment/checkout/payfast/success";
      payfastForm.cancel_url =
        "https://cartalist.herokuapp.com/buy/payment/checkout/payfast/cancel";
      payfastForm.notify_url = "https://cartalist.herokuapp.com/api/payfast/notify_url";
      // Buyer Details
      payfastForm.buyer_email = this.user_email;
      // Transaction Details
      payfastForm.m_payment_id = this.randomize();
      payfastForm.amount = this.selected_license.payed_amount.amount.toFixed(2);
      payfastForm.item_name = "Lisence";
      payfastForm.item_description = this.selected_license.license_name;
      payfastForm.email_confirmation = "1";
      payfastForm.confirmation_address = this.user_email;
      // Set Payment Method
      payfastForm.payment_method = "eft";
      // BUG FIX: the notify_url separator was mangled to "¬ify_url" ("&not"
      // collapsed into the ¬ character), which both dropped the parameter
      // name and corrupted the signature string. Restored to "&notify_url".
      // NOTE(review): item_name/item_description use encodeURI while the
      // other fields use encodeURIComponent — confirm against PayFast's
      // signature rules.
      let hash = `merchant_id=${encodeURIComponent(
        payfastForm.merchant_id
      )}&merchant_key=${encodeURIComponent(
        payfastForm.merchant_key
      )}&return_url=${encodeURIComponent(
        payfastForm.return_url
      )}&cancel_url=${encodeURIComponent(
        payfastForm.cancel_url
      )}&notify_url=${encodeURIComponent(
        payfastForm.notify_url
      )}&email_address=${encodeURIComponent(
        payfastForm.buyer_email
      )}&m_payment_id=${encodeURIComponent(
        payfastForm.m_payment_id
      )}&amount=${encodeURIComponent(
        payfastForm.amount
      )}&item_name=${encodeURI(
        payfastForm.item_name
      )}&item_description=${encodeURI(
        payfastForm.item_description
      )}&email_confirmation=${encodeURIComponent(
        payfastForm.email_confirmation
      )}&confirmation_address=${encodeURIComponent(
        payfastForm.confirmation_address
      )}&payment_method=${encodeURIComponent(payfastForm.payment_method)}`;
      // MD5 encode
      payfastForm.signature = this.md5.appendStr(hash).end();
      this.payfastForm = payfastForm
      console.log(payfastForm,this.payfastForm)
    }
  }
  // Generate a 10-char alphanumeric payment id.
  // NOTE(review): Math.random is not cryptographically secure; if uniqueness/
  // unguessability matters, use crypto.getRandomValues.
  randomize() {
    // Randomize chars
    let length = 10;
    let chars =
      "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
    var result = "";
    for (var i = length; i > 0; --i)
      result += chars[Math.floor(Math.random() * chars.length)];
    return result;
  }
}
<file_sep>import { Injectable } from "@angular/core";
import { CategoryApiService } from "../category-api/category-api.service";
import { CategoryStateService } from "../category-state/category-state.service";
// Facade between the category HTTP API and the category state store:
// components talk only to this service.
@Injectable({
  providedIn: "root",
})
export class CategoryFacadeService {
  constructor(
    private categoryState: CategoryStateService,
    private categoryApi: CategoryApiService
  ) {}

  /** Fetch categories from the API and push them into the state store. */
  loadCategories() {
    this.categoryApi.getCategories().subscribe(
      (response) => this.categoryState.setCategories(response.category),
      (problem) => console.log(problem)
    );
  }

  /** Expose the category stream to components. */
  getCategories$() {
    // here we just pass the state without any projections
    // it may happen that it is necessary to combine two or more streams and expose to the components
    return this.categoryState.getCategories$();
  }
}
<file_sep>import { Injectable } from "@angular/core";
import { BehaviorSubject } from "rxjs";
// In-memory store for the user's orders, backed by a BehaviorSubject.
@Injectable({
  providedIn: "root",
})
export class OrdersStateService {
  // Current order list; starts empty until setOrders is called.
  private orders$ = new BehaviorSubject<any[]>([]);

  constructor() {}

  /** Replace the order matching data._id in place, then re-emit a copy. */
  updateOrder(data) {
    const orders = this.orders$.getValue();
    for (const [position, order] of orders.entries()) {
      if (order._id === data._id) {
        orders[position] = data;
      }
    }
    this.orders$.next([...orders]);
  }

  /** Read-only stream of orders for consumers. */
  getOrders$() {
    return this.orders$.asObservable();
  }

  /** Load a brand-new set of orders. */
  setOrders(orders) {
    this.orders$.next(orders);
  }
}
<file_sep>import { Component, OnInit } from '@angular/core';
import { ProductFacadeService } from "src/app/core-modules/services/products/product-facade/product-facade.service";
import { HeaderStateService } from "src/app/core-modules/services/header/header-state/header-state.service";
import { CartFacadeService } from "src/app/core-modules/services/cart/cart-facade/cart-facade.service";
import { Router } from "@angular/router";
// Lists the products the signed-in user has liked, with navigation to the
// product detail page and add-to-cart support.
@Component({
  selector: 'app-user-liked',
  templateUrl: './user-liked.component.html',
  styleUrls: ['./user-liked.component.scss'],
})
export class UserLikedComponent implements OnInit {
  // Stream of liked products from the product facade.
  products$;
  // Latest emitted snapshot, consumed by the template.
  public liked;
  constructor(private _productFacade: ProductFacadeService,
    private headerState: HeaderStateService,
    private cartFacade: CartFacadeService,
    private router: Router,) {}
  ngOnInit() {
    this._productFacade.loadUserlikedProducts();
    this.products$ = this._productFacade.getUserlikedProducts();
    // FIX: removed leftover debug console.log(this.products$) — it only
    // printed the Observable wrapper object, not the data.
    this.products$.subscribe(
      res => {
        this.liked = res
      }
    )
  }
  // Open the detail page for a product, switching the header into detail mode.
  viewDetailPage(product) {
    this.headerState.updateHeaderStatus("product_detail");
    //navigate
    this.router.navigate([`/user/landing/product-detail`, product._id]);
  }
  // Delegate add-to-cart to the cart facade.
  addToCart(product) {
    this.cartFacade.addToCart(product);
  }
}
<file_sep>const express = require("express");
const mongoose = require("mongoose");
const bodyParser = require("body-parser");
const passport = require("passport");
const cookieSession = require("cookie-session");
const cors = require("cors");
//Access Passport Config
const googleAuth = require("./config/users/google-auth");
const facebookAuth = require("./config/users/facebook-auth");
//Access Pages for Routing
const users = require("./routes/api/auth/user");
const activity = require("./routes/api/store/activity");
const notification = require("./routes/api/user/notifications");
const passportRoutes = require("./routes/api/auth/user_oauth");
const verifyRoute = require("./routes/api/auth/verify");
const forgotPasswordRoute = require("./routes/api/auth/forgotPassword");
const profileRoutes = require("./routes/api/user/profile");
const storeRegister = require("./routes/api/store/auth");
const category = require("./routes/api/store/category");
const product = require("./routes/api/store/product");
const payfast = require("./routes/api/payments/payfast");
const receipt = require("./routes/api/payments/receipt");
const voucher = require("./routes/api/store/voucher");
const email = require("./routes/api/emails/email");
const keys = require("./config/users/keys");
// Express application bootstrap: middleware, session/passport setup, MongoDB
// connection, API route mounting, and server start. Middleware order matters:
// parsers and session must be registered before the routes that use them.
const app = express();
app.use(cors());
// Bodyparser Middleware
app.use(bodyParser.json());
// For Testing
app.use(bodyParser.urlencoded({ extended: true }));
// Encrypt and set up cookie-session
app.use(
  cookieSession({
    maxAge: 24 * 60 * 60 * 1000, //day in ms
    keys: [keys.session.cookieKey],
  })
);
// Passport initialize and use session-cookies
app.use(passport.initialize());
app.use(passport.session());
// Passport jwt strategy initialize
require("./config/users/passport")(passport);
// Point static path to public (the built Ionic client).
// NOTE(review): the same static middleware is registered again below under
// NODE_ENV === "production" — the second registration is redundant.
app.use(express.static("./client/www"));
/* DB Config */
// Access mongoURI
// SECURITY(review): keys.mongoURI is a hard-coded connection string committed
// to the repo (see config/users/keys) — move to an environment variable.
const db = keys.mongoURI;
// Connect to MongoDB via mlab
mongoose
  .connect(db, { useNewUrlParser: true, dbName: "afrob" })
  .then(() => console.log("MongoDB successfully connected"))
  .catch((err) => {
    console.log(err, "Check your internet connection!");
  });
// API Routes
app.use("/api/user", users);
app.use("/api/activities", activity);
app.use("/api/notifications", notification);
app.use("/api/oauth", passportRoutes);
app.use("/api/verify", verifyRoute);
app.use("/api/forgotPassword", forgotPasswordRoute);
app.use("/api/profile", profileRoutes);
app.use("/api/store", storeRegister);
app.use("/api/category", category);
app.use("/api/product", product);
app.use("/api/payfast", payfast);
app.use("/api/voucher", voucher);
app.use("/api/receipt", receipt);
app.use("/api/emails", email);
// Listening to port (Heroku supplies PORT; 3000 is the local default)
const port = process.env.PORT || 3000;
if (process.env.NODE_ENV === "production") {
  app.use(express.static("./client/www"));
}
app.listen(port, () =>
  console.log(`Server up and running.. on port ${port} !`)
);
<file_sep>import { Component, OnInit, EventEmitter, Output } from "@angular/core";
import { ProductFacadeService } from "src/app/core-modules/services/products/product-facade/product-facade.service";
import { ActivityFacadeService } from "src/app/core-modules/services/activities/activity-facade/activity-facade.service";
import { distinctUntilChanged } from 'rxjs/operators';
// Shows the user's activity notifications and lets them jump to the
// product-review form for rate-order notifications.
@Component({
  selector: "app-notifications",
  templateUrl: "./notifications.component.html",
  styleUrls: ["./notifications.component.scss"],
})
export class NotificationsComponent implements OnInit {
  // Tells the parent page which segment to switch to.
  @Output() childSegment = new EventEmitter<string>();
  public notifications$;
  public rate_order_notification = [];
  public review;

  constructor(
    private _productFacade: ProductFacadeService,
    private activityFacade: ActivityFacadeService
  ) {}

  ngOnInit() {
    // Trigger a fresh load, then expose a de-duplicated stream to the template.
    this.activityFacade.loadNotifications();
    this.notifications$ = this.activityFacade
      .getNotifications$()
      .pipe(distinctUntilChanged());
  }

  /** Switch the parent into the review segment and stage the chosen product. */
  reviewProduct(data) {
    this.childSegment.emit("add-Reviews");
    this._productFacade.loadCurrentProduct(data);
  }
}
<file_sep>import { Component, OnInit, Output, EventEmitter } from "@angular/core";
import { FormGroup, FormBuilder, Validators } from "@angular/forms";
import { Router } from "@angular/router";
import { AuthApiService } from "src/app/core-modules/services/auth/auth-api/auth-api.service";
import { ToastController } from "@ionic/angular";
import { HeaderStateService } from "src/app/core-modules/services/header/header-state/header-state.service";
import { UserFacadeService } from "src/app/core-modules/services/profile/profile-facade/profile-facade.service";
import { MenuStateService } from "src/app/core-modules/services/menus//menu-state/menu-state.service";
import { Observer, fromEvent, merge, Observable } from "rxjs";
import { map } from "rxjs/operators";
// Login page: validates credentials, signs the user in via the auth API, and
// routes them to the correct area (store admin / super admin / returning
// route / user landing) based on the response.
@Component({
  selector: "app-login",
  templateUrl: "./login.page.html",
  styleUrls: ["./login.page.scss"],
})
export class LoginPage implements OnInit {
  title: string = "AfroB**";
  login: boolean = false;
  // NOTE(review): hard-coded localhost OAuth entry point — will not work in
  // production builds; move to environment config.
  goGoogle: string = "http://localhost:3000/api/oauth/google";
  // NOTE(review): declared private but bound from the template — Angular AOT
  // production builds may reject this; confirm and consider making it public.
  private loginForm: FormGroup;
  public emailVaule = "";
  public passwordValue;
  // Gate for the submit spinner (true = idle, false = submitting).
  public Submit_load = true;
  // NOTE(review): these literals appear redacted in this copy ("<PASSWORD>");
  // hideShowPassword() toggles between "text" and the hidden type — confirm
  // the real initial values against the deployed source.
  passwordType: string = "<PASSWORD>";
  passwordIcon: string = "<PASSWORD>";
  // Route to return to after login (set by HeaderStateService).
  public return_to;
  // Emits the current online/offline status, starting with navigator.onLine.
  createOnline$() {
    return merge<boolean>(
      fromEvent(window, "offline").pipe(map(() => false)),
      fromEvent(window, "online").pipe(map(() => true)),
      new Observable((sub: Observer<boolean>) => {
        sub.next(navigator.onLine);
        sub.complete();
      })
    );
  }
  constructor(
    public toastController: ToastController,
    private formBuilder: FormBuilder,
    private _authService: AuthApiService,
    private router: Router,
    private headerState: HeaderStateService,
    private userFacade: UserFacadeService,
    public menuState: MenuStateService
  ) { }
  ngOnInit() {
    // Already-authenticated users (token present) are bounced straight to the
    // user landing page once connectivity is confirmed.
    this.createOnline$().subscribe((isOnline) => {
      if (isOnline && !!localStorage.getItem("token")) {
        this.menuState.updateMenuStatus("user");
        this.router.navigate(["/user/landing"]);
      }
    });
    this.loginForm = this.formBuilder.group({
      email: [
        "",
        Validators.compose([
          Validators.maxLength(20),
          Validators.pattern(
            "^[_A-Za-z0-9-\\+]+(\\.[_A-Za-z0-9-]+)*@[A-Za-z0-9-]+(\\.[A-Za-z0-9]+)*(\\.[A-Za-z]{2,})$"
          ),
          Validators.required,
        ]),
      ],
      password: ["", [Validators.required, Validators.minLength(6)]],
    });
  }
  // Submit credentials and route by role: store owner -> admin store,
  // admin -> super admin, pending return route -> that route, else landing.
  onSubmit() {
    this.return_to = this.headerState.return_route;
    this._authService.userLogin(this.loginForm.value).subscribe(
      (res) => {
        localStorage.setItem("token", res.token);
        this.userFacade.loadUser();
        this.menuState.loggedIn();
        if (res.user.storeOwner && !this.return_to) {
          this.menuState.updateMenuStatus("admin");
          this.router.navigate(["/admin-store/store"]);
        } else if (res.user.admin && !this.return_to) {
          this.router.navigate(["/super/admin"]);
        } else if (this.return_to) {
          this.router.navigate([this.return_to]);
          this.headerState.return_route = null;
        } else {
          this.menuState.updateMenuStatus("user");
          this.router.navigate(["/user/landing"]);
        }
        this.presentToast("SignedIn successfully ");
      },
      (error) => {
        console.error(error.usText, error);
        this.presentToast("error! wrong credentials");
        // Re-enable the submit button after a failed attempt.
        this.Submit_load = true;
      }
    );
  }
  sendToParent() { }
  //Check if internet connection
  activateLoad() {
    this.Submit_load = false;
  }
  // Show a short bottom toast with the given message.
  async presentToast(data) {
    const toast = await this.toastController.create({
      message: data,
      duration: 2000,
      position: "bottom",
    });
    toast.present();
  }
  // Toggle the password field between masked and plain text.
  hideShowPassword() {
    this.passwordType = this.passwordType === "text" ? "<PASSWORD>" : "<PASSWORD>";
    this.passwordIcon = this.passwordIcon === "eye-off" ? "eye" : "eye-off";
    console.log("this");
  }
}
<file_sep>import {
Component,
OnChanges,
Input,
OnInit,
SimpleChanges,
} from "@angular/core";
import { distinctUntilChanged } from "rxjs/operators";
import { OrderFacadeService } from "src/app/core-modules/services/orders/order-facade/order-facade.service";
import {BreakpointObserver, Breakpoints} from '@angular/cdk/layout';
// Displays the user's past orders/payments, adapting the layout to the
// detected device class (Handset / Tablet / Web) and supporting expandable
// per-order detail views.
@Component({
  selector: "app-payments",
  templateUrl: "./payments.component.html",
  styleUrls: ["./payments.component.scss"],
})
export class PaymentsComponent implements OnInit, OnChanges {
  // De-duplicated stream derived from the @Input orders observable.
  orderList$;
  // True when the stream emitted an empty/undefined list.
  no_order;
  // "Handset" | "Tablet" | "Web", set by the breakpoint observers below.
  device_screen;
  clickButton;
  expand_detail
  detail_view_product
  // Order shown in the desktop split view (defaults to the first order).
  desktopViewOrder
  public filtered_array;
  // Observable of orders provided by the parent.
  @Input() orders;
  public active_expansion;
  public panelOpenState = false;
  constructor(private orderFacade: OrderFacadeService,breakpointObserver: BreakpointObserver) {
    // Track the device class; the last matching observer wins.
    breakpointObserver.observe([
      Breakpoints.Handset
    ]).subscribe(result => {
      if (result.matches) {
        this.device_screen = "Handset"
      }
    });
    breakpointObserver.observe([
      Breakpoints.Tablet
    ]).subscribe(result => {
      if (result.matches) {
        this.device_screen = "Tablet"
      }
    });
    breakpointObserver.observe([
      Breakpoints.Web
    ]).subscribe(result => {
      if (result.matches) {
        this.device_screen = "Web"
      }
    });
  }
  // Rebuild the de-duplicated stream whenever the parent passes new orders.
  // NOTE(review): ngOnChanges runs before ngOnInit, so orderList$ is set by
  // the time ngOnInit subscribes — this ordering is load-bearing.
  ngOnChanges(changes: SimpleChanges) {
    this.orderList$ = this.orders.pipe(distinctUntilChanged());
  }
  ngOnInit() {
    //getOrders
    this.orderList$.subscribe(
      (res) => {
        this.desktopViewOrder = res[0]
        if (res == undefined || res.length < 1) {
          this.no_order = true;
        }
      },
      (err) => {
        console.log(err);
      }
    );
  }
  // Mark an order's expansion panel as the active one.
  openOrder(id) {
    this.clickButton = false;
    this.active_expansion = id;
    console.log(this.active_expansion);
  }
  // True when the given order's panel is the open one.
  checkId(id) {
    if (this.panelOpenState === true) {
      return this.active_expansion === id;
    }
  }
  // Select the order shown in the desktop detail pane.
  setDesktopOrderView( order){
    this.desktopViewOrder = order
    console.log(order)
  }
  // Toggle the expanded detail view and collapse any product detail.
  expand(event){
    if(this.expand_detail){
      this.expand_detail = false
    }else{
      this.expand_detail = true
    }
    this.detail_view_product =false
  }
  viewProduct(item){
    this.detail_view_product = item
    console.log(item)
  }
  // Report whether every item in the given order has progressed past
  // "pending"/"preparing".
  // NOTE(review): this relies on orderList$ (a BehaviorSubject-backed stream)
  // emitting SYNCHRONOUSLY inside subscribe so filtered_array is populated
  // before it is read below — confirm; with an async source this would throw.
  checkProductReady(order_id) {
    this.orderList$.subscribe(
      (res) => {
        let filtered_array = res.filter((item) => {
          return item._id === order_id;
        });
        this.filtered_array = filtered_array;
      },
      (err) => {
        console.log(err);
      }
    );
    let stat = [];
    this.filtered_array[0].items.forEach((z) => {
      stat.push(z.order_status);
    });
    if (stat.includes("pending")) {
      return false;
    } else if (stat.includes("preparing")) {
      return false;
    } else {
      return true;
    }
  }
}
<file_sep>import { Injectable } from "@angular/core";
import { BehaviorSubject } from "rxjs";
// In-memory store for the product category list, backed by a BehaviorSubject.
@Injectable({
  providedIn: "root",
})
export class CategoryStateService {
  // Current category list; empty until setCategories is called.
  private categories$ = new BehaviorSubject<any[]>([]);

  constructor() {}

  /** Replace the whole category list. */
  setCategories(categories) {
    this.categories$.next(categories);
  }

  /** Read-only stream of categories for consumers. */
  getCategories$() {
    return this.categories$.asObservable();
  }
}
<file_sep>const express = require("express");
const async = require("async");
const fs = require("fs");
const jwt = require("jsonwebtoken");
const keys = require("../../../config/users/keys");
const authCheck = require("../../../validation/authenticate/checkMiddleware/jwtCheck");
const router = express.Router();
// Load Product model
const Product = require("../../../models/category/Product");
const Store = require("../../../models/store/Store");
const Order = require("../../../models/store/Order");
// Prepare middleware
//const upload = multer({dest: 'uploads/images'})
const upload = require("../../../utils/image_middleware/multer");
const cloudinary = require("../../../utils/image_middleware/cloudinary");
// View Product
// Return the entire product catalogue as { products: [...] }.
router.get("/view_all_products", (req, res) => {
  Product.find()
    .then((allProducts) => res.json({ products: allProducts }))
    .catch((err) => console.log(err));
});
//view special category segments
// Filters the catalogue by the "signal" keyword in the body. For "category"
// body.value is a category name; for "similar_products" it is an object with
// category / sub_category / product_id.
router.post("/view_segment_products", (req, res) => {
  const category = req.body.value;
  const signal = req.body.signal;
  // One predicate per segment keyword; each preserves the original branch's
  // exact (loose-equality) semantics.
  const predicates = {
    category: (p) => p.category.category == category,
    top_sales: (p) => p.sold > 1,
    liked_products: (p) => p.likes.length > 0,
    recommended_products: (p) => p.license.length > 0,
    sales_products: (p) => p.sale == true,
    similar_products: (p) =>
      p.category.category == category.category &&
      p.category.sub_category == category.sub_category &&
      p._id != category.product_id,
  };
  Product.find()
    .then((products) => {
      const match = predicates[signal];
      if (!match) {
        // BUG FIX: an unrecognised signal previously fell through every
        // if-branch without ever responding, leaving the request to hang
        // until the client timed out.
        return res.status(400).json({ err: `unknown signal: ${signal}` });
      }
      return res.json({ products: products.filter(match) });
    })
    .catch((err) => console.log(err));
});
//update likes
// Toggle the authenticated user's "like" on a product: push the user's email
// into product.likes if absent, pull it if present. Either way, re-read the
// product and respond with { product: <fresh doc> }.
router.post("/update_like", authCheck, (req, res, done) => {
  let user = req.authData;
  let product_id = req.body._id;
  Product.findOne({ _id: product_id })
    .then((p) => {
      //filter likes by id (likes holds user email strings)
      let like = p.likes.filter((like) => {
        return like == user.email;
      });
      if (like.length < 1) {
        // Not yet liked: add this user's email to the likes array.
        Product.updateOne(
          { _id: product_id },
          { $push: { likes: user.email } }
        ).then((p) => {
          Product.findOne({ _id: product_id })
            .then((p) => {
              res.json({ product: p });
            })
            .catch((err) => {
              res.json({ err: err });
            });
        });
      } else {
        // Already liked: remove this user's email (un-like).
        Product.updateOne(
          { _id: product_id },
          { $pull: { likes: user.email } },
          { multi: true }
        ).then((p) => {
          Product.findOne({ _id: product_id })
            .then((p) => {
              res.json({ product: p });
            })
            .catch((err) => {
              res.json({ err: err });
            });
        });
      }
    })
    .catch((err) => {
      console.log(err);
    });
});
// Return every { product, rating } pair where the authenticated user left a
// rating. Responds { products_reviews: [...] } or { msg: "No products reviewed" }.
router.get("/get_user_reviews", authCheck, async (req, res) => {
  let user = req.authData;
  let productreviews = []
  // Only fetch products that contain at least one rating by this user.
  Product.find({ "ratings.email": user.email })
    .then((products) => {
      // Collect each of this user's ratings, paired with its product.
      const ratingsloop = async () => {
        for await (const product of products) {
          for (const rating of product.ratings) {
            if (rating.email == user.email) {
              productreviews.push({ product: product, rating: rating })
            }
          }
        }
        return productreviews
      }
      ratingsloop().then(p => {
        if (p.length > 0) {
          res.json({ products_reviews: p });
        } else {
          res.json({ msg: "No products reviewed" });
        }
      })
    })
    .catch((err) => {
      res.json({ err: err });
    });
});
// Return all products the authenticated user has liked, as { products: [...] }.
router.get("/get_user_liked_products", authCheck, (req, res) => {
  const user = req.authData;
  Product.find()
    .then((products) => {
      // BUG FIX: the previous filter used a for-loop with an implicit global
      // index `i`, an out-of-bounds `<= p.likes.length` bound, and `return`
      // on the FIRST iteration — so only likes[0] was ever compared and a
      // like anywhere else in the array was missed. includes() checks the
      // whole likes array.
      const liked = products.filter((p) => p.likes.includes(user.email));
      res.json({ products: liked });
    })
    .catch((err) => {
      res.json({ err: err });
    });
});
//update review
// Add the authenticated user's rating/comment to a product, once per user.
// Responds with the fresh product, or a "already reviewed" message.
router.post("/update_review", authCheck, (req, res, done) => {
  const user = req.authData;
  const product_id = req.body._id;
  const rate = req.body.rate;
  Product.findOne({ _id: product_id })
    .then((p) => {
      // Has this user already rated this product?
      const rating = p.ratings.filter((rating) => {
        return rating.email == user.email;
      });
      if (rating.length < 1) {
        // First review by this user: append it to the ratings array.
        Product.updateOne(
          { _id: product_id },
          {
            $push: {
              ratings: {
                email: user.email,
                rate: rate,
                comment: req.body.comment,
              },
            },
          }
        ).then((p) => {
          Product.findOne({ _id: product_id })
            .then((p) => {
              res.json({ product: p });
            })
            .catch((err) => {
              res.json({ err: err });
            });
        });
      } else {
        res.json({ msg: "Sorry already reviewed the product", rating: rating });
      }
    })
    // BUG FIX: the outer findOne had no .catch, so a query failure produced
    // an unhandled rejection and the request hung; also removed leftover
    // debug console.log(p) calls.
    .catch((err) => {
      res.json({ err: err });
    });
});
// upload picture(s)
// Uploads every file received by multer to Cloudinary ("Images" folder),
// deletes the local temp file, and responds once with all resulting URLs.
router.post("/uploadImage", upload.array("image"), async (req, res) => {
  const uploader = async (path) => await cloudinary.uploads(path, "Images");
  if (req.method === "POST") {
    const urls = [];
    const files = req.files;
    for (const file of files) {
      const { path } = file;
      const newPath = await uploader(path);
      urls.push(newPath);
      // Remove the temporary file multer wrote to disk.
      fs.unlinkSync(path);
    }
    // BUG FIX: the response used to be sent INSIDE the loop, so a multi-file
    // upload answered after the first file and then crashed on the second
    // with "Cannot set headers after they are sent". Respond exactly once,
    // after all files are processed; also answer empty uploads instead of
    // hanging.
    if (urls.length > 0) {
      res.status(200).json({
        message: "uploaded successfully",
        data: urls,
      });
    } else {
      res.status(400).json({ err: "no files uploaded" });
    }
  } else {
    res.status(405).json({
      err: `${req.method} method not allowed`,
    });
  }
});
// Delete an image from Cloudinary by its public_id; the project's cloudinary
// helper writes the response itself (it receives `res`).
router.post("/delete_cloudinary_image", (req, res) => {
  let public_id = req.body.public_id;
  cloudinary.remover(public_id, res);
});
// Register Product
// Creates a new product unless one with the same productName already exists.
// Responds { product: <saved doc> } or { product: "Product exists" }.
router.post("/createProduct", authCheck, (req, res) => {
  const cat = req.body;
  // BUG FIX: the duplicate check previously queried the non-existent field
  // "name"; the schema field (as used in the constructor below) is
  // "productName", so findOne always returned null and duplicate products
  // were silently created. Also removed leftover debug console.log(cat).
  Product.findOne({ productName: cat.productName })
    .then((product) => {
      if (!product) {
        const newProduct = new Product({
          //update store id
          storeId: cat.store_id,
          productName: cat.productName,
          category: cat.selectCategory,
          productDescription: cat.productDescription,
          productImage: cat.productImage,
          metaTags: cat.metaTags,
          features: cat.features,
          in_stock: cat.in_stock,
          bill_of_material: cat.bill_of_material,
          productPrice: cat.productPrice,
          // New products start with the modified price equal to the base price.
          modified_price: cat.productPrice,
        });
        newProduct.save().then((saved) => {
          return res.json({ product: saved });
        });
      } else {
        return res.json({ product: "Product exists" });
      }
    })
    .catch((err) => console.log(err));
});
// Bulk-create products from CSV rows: [?, name, description, category, image, price].
router.post("/create_bulk_products", authCheck, (req, res) => {
  // BUG FIX: `user`/`data` were assigned without const/let (implicit
  // globals), the async.each iterator never invoked its callback (so the
  // series never completed), and the response was sent before any save
  // finished. Now each row signals completion and we respond once all rows
  // are processed.
  const data = req.body;
  Store.findOne({ _id: data.store_id }).then((store) => {
    async.each(
      data.csv,
      function (arr, callback) {
        if (!arr) {
          // Skip empty rows.
          return callback();
        }
        const product = new Product({
          storeId: store._id,
          productName: arr[1],
          category: arr[3],
          productDescription: arr[2],
          productImage: arr[4],
          metaTags: arr[1],
          productPrice: arr[5],
          modified_price: arr[5],
        });
        product.save().then(() => callback(), callback);
      },
      function (err) {
        if (err) {
          return res.json({ err: err });
        }
        return res.json({ msg: "bulk created" });
      }
    );
  });
});
//view store products
// List the products belonging to the authenticated store owner's store.
router.get("/view_store_products", authCheck, (req, res) => {
  const user = req.authData;
  // FIX: removed leftover debug console.log(user) — it logged the decoded
  // auth payload (user PII) on every request; added a .catch so a query
  // failure no longer leaves the request hanging.
  Product.find({ storeId: user.store_id })
    .then((products) => {
      return res.json({ products: products });
    })
    .catch((err) => res.json({ err: err }));
});
// List the products of the store/vendor whose id is supplied in the body.
router.post("/view_vendor_products", authCheck, (req, res) => {
  Product.find({ storeId: req.body.id }).then((products) =>
    res.json({ products: products })
  );
});
// Fetch a single product by id; note find() returns an ARRAY, so the
// response shape is { product: [doc] }.
router.post("/view_product", authCheck, (req, res) => {
  Product.find({ _id: req.body.id }).then((product) =>
    res.json({ product: product })
  );
});
// Update Product
// Overwrites the editable fields of a product, then reads the document back
// and returns the modified product to the client.
router.post("/updateProduct", authCheck, (req, res) => {
  const body = req.body;
  const productId = body.product.product_id;
  const fields = {
    storeId: body.store_id,
    productName: body.product.productName,
    category: body.product.selectCategory,
    productDescription: body.product.productDescription,
    productImage: body.product.productImage,
    metaTags: body.product.metaTags,
    features: body.product.features,
    productPrice: body.product.productPrice,
    production_status: body.product.production_status,
    next_available_date: body.product.next_available_date,
    in_stock: body.product.in_stock,
    sold: body.product.sold,
  };
  Product.updateOne({ _id: productId }, { $set: fields })
    .then(() => {
      // Re-fetch so the client receives the persisted document.
      Product.findOne({ _id: productId })
        .then((modifiedProduct) => res.json({ product: modifiedProduct }))
        .catch((err) => console.log(err));
    })
    .catch((err) => console.log(err));
});
// Delete Product
// Removes one product by _id, then returns the remaining products.
router.post("/deleteProduct", authCheck, (req, res) => {
  Product.deleteOne({ _id: req.body.id }, (err) => {
    if (!err) {
      // NOTE(review): Product.find() with no filter returns products across
      // ALL stores — confirm the client expects that, not just this store's.
      Product.find()
        .then((product) => {
          res.json({ product });
        })
        // inner `err` shadows the outer deletion-error parameter
        .catch((err) => console.log(err));
    } else {
      res.json({ msg: err });
    }
  });
});
module.exports = router;
<file_sep>import {
Component,
OnInit,
Output,
EventEmitter,
OnChanges,
Input,
OnDestroy,
} from "@angular/core";
import { ProductFacadeService } from "src/app/core-modules/services/products/product-facade/product-facade.service";
import { CategoryFacadeService } from "src/app/core-modules/services/categories/category-facade/category-facade.service";
import { UserFacadeService } from "src/app/core-modules/services/profile/profile-facade/profile-facade.service";
import {
AlertController,
LoadingController,
ModalController,
ToastController,
} from "@ionic/angular";
import { FormGroup, FormBuilder, FormControl } from "@angular/forms";
import { ProductApiService } from "src/app/core-modules/services/products/product-api/product-api.service";
import { browserRefresh } from "src/app/app.component";
import { distinctUntilChanged } from "rxjs/operators";
import { Router } from '@angular/router';
import {BreakpointObserver, Breakpoints} from '@angular/cdk/layout';
import { PopoverController } from '@ionic/angular';
import { ProductsPopoverComponent } from '../products/products-popover/products-popover.component'
// Store-admin product listing component: renders the store's products,
// supports inline editing/deleting and a desktop detail pane, and gates the
// "add product" action on the store's licence or free-trial allowance.
@Component({
  selector: "app-products-comp",
  templateUrl: "./products.component.html",
  styleUrls: ["./products.component.scss"],
})
export class ProductsComponent implements OnInit, OnChanges {
  edit; // true while the inline edit form is shown
  clickButton;
  active_expansion;
  no_product; // flags an empty product list for the template
  detail_view_product; // product shown in the desktop detail pane
  @Output() shareProduct = new EventEmitter();
  @Output() segment = new EventEmitter(); // asks the parent page to switch segment
  //FormState
  id; // _id of the currently expanded product (guards re-patching the form)
  updateProduct: FormGroup;
  public picture_uploaded: any; // file picked in handleFileInput, awaiting upload
  //observables
  products$;
  categories$;
  isUpdating$;
  device_screen; // "Handset" | "Tablet" | "Web" (from BreakpointObserver)
  @Input() p_products; // product stream supplied by the parent
  constructor(
    private productFacade: ProductFacadeService,
    private userFacade: UserFacadeService,
    private categoryFacade: CategoryFacadeService,
    public loadingController: LoadingController,
    private formBuilder: FormBuilder,
    private route: Router,
    private productApi: ProductApiService,
    public alertController: AlertController,
    public modalController: ModalController,
    public toastController: ToastController,
    breakpointObserver: BreakpointObserver,
    public popoverController: PopoverController
  ) {
    ////loading
    //this.isUpdating$ = this.productFacade.isUpdating$();
    // Track the device class so the template can adapt its layout.
    breakpointObserver.observe([
      Breakpoints.Handset
    ]).subscribe(result => {
      if (result.matches) {
        this.device_screen = "Handset"
      }
    });
    breakpointObserver.observe([
      Breakpoints.Tablet
    ]).subscribe(result => {
      if (result.matches) {
        this.device_screen = "Tablet"
      }
    });
    breakpointObserver.observe([
      Breakpoints.Web
    ]).subscribe(result => {
      if (result.matches) {
        this.device_screen = "Web"
      }
    });
  }
  // Refresh the local stream whenever the parent passes a new product source.
  ngOnChanges() {
    this.products$ = this.p_products.pipe(distinctUntilChanged());
  }
  ngOnInit() {
    //getCategories
    this.categories$ = this.categoryFacade.getCategories$();
    // Default the detail pane to the first product; flag an empty list.
    this.products$.subscribe(
      (res) => {
        this.detail_view_product = res[0]
        if (res == undefined || res.length < 1) {
          this.no_product = true;
        }
      },
      (err) => {
        console.log(err);
      }
    );
    // FormGroup backing the inline product-edit form.
    this.updateProduct = this.formBuilder.group({
      product_id: new FormControl(),
      production_status: new FormControl(),
      next_available_date: new FormControl(),
      productName: new FormControl(),
      selectCategory: new FormControl(),
      productDescription: new FormControl(),
      productImage: new FormControl(),
      metaTags: new FormControl(),
      productPrice: new FormControl(),
      in_stock: new FormControl(),
      sold: new FormControl(),
    });
  }
  // Entry point when a product image is tapped in the template.
  viewImageOptions(image, product) {
    let data = { cloudImage: image, product: product };
    //View picture and get more options
    //this.presentImageView(data);
    this.pictureOptions(image, product);
  }
  // Confirmation dialog for replacing a product image; the "Okay" handler is
  // still a stub (upload/update/delete steps are commented TODOs).
  async pictureOptions(image, product) {
    const data = { public_id: image.id };
    const alert = await this.alertController.create({
      cssClass: "my-custom-class",
      header: "Confirm!",
      message: "Replace Image!!!",
      buttons: [
        {
          text: "Cancel",
          role: "cancel",
          cssClass: "secondary",
          handler: () => { },
        },
        {
          text: "Okay",
          handler: () => {
            console.log("product: ", product, "image: ", data);
            ////upload new image
            //update product
            //delete cloudinary image
            //this.deleteCloudImage(Image_data);
          },
        },
      ],
    });
    await alert.present();
  }
  /*async presentImageView(data) {
    console.log(data);
    const modal = await this.modalController.create({
      component: ProductViewModalComponent,
      cssClass: "my-custom-class",
      componentProps: { ...data },
    });
    return await modal.present();
  }*/
  // Keep only JPEG/PNG picks from the file input.
  handleFileInput(event) {
    let file = event.target.files[0];
    if (file.type == "image/jpeg" || file.type == "image/png") {
      this.picture_uploaded = file;
    }
  }
  // Upload the previously selected file and stash the returned image data.
  uploadImage() {
    const _formData = new FormData();
    _formData.append("image", this.picture_uploaded);
    //save picture
    this.productApi.uploadImage(_formData).subscribe(
      (res) => {
        let imagedata = res.data;
        // NOTE(review): assigning to updateProduct.value mutates a snapshot,
        // not the form control — confirm patchValue isn't needed here.
        this.updateProduct.value.productImage = imagedata;
        console.log(imagedata);
      },
      (err) => {
        console.log(err);
      }
    );
  }
  // Persist the edit form's values for the current store's product.
  update() {
    let store = this.userFacade.getCurrentStore();
    this.edit = false;
    let product = this.updateProduct.value;
    product._id = product.product_id
    this.productFacade.updateProduct({ product: product, store_id: store._id });
    this.clickButton = true;
  }
  editProduct() {
    this.edit = true;
    this.clickButton = true;
  }
  onDeleteProduct(product) {
    this.productFacade.removeProduct(product);
    this.clickButton = true;
  }
  // Expansion handler: patch the tapped product into the edit form once
  // (skipped when the same product is expanded again).
  expanded(p) {
    let product = { ...p };
    // Patch the values to html
    if (this.id == product._id) {
      return null;
    } else {
      this.id = product._id;
      this.updateProduct.patchValue({
        product_id: product._id,
        productName: product.productName,
        selectCategory: product.category,
        productDescription: product.productDescription,
        productImage: product.productImage,
        metaTags: product.metaTags,
        productPrice: product.productPrice,
        production_status: product.production_status,
        next_available_date: product.next_available_date,
        in_stock: product.in_stock,
        sold: product.sold,
      });
    }
  }
  onSubmit() {
    console.log("submit: ", this.updateProduct.value);
  }
  // Gatekeeper for the "add product" action: requires a return policy first,
  // then allows adding only while a licence (or the free trial) still has
  // product quota and time remaining; otherwise prompts to buy a licence.
  sendToParent() {
    let products;
    let data = "add_product"
    //check for lisence
    this.productFacade
      .getStoreProducts$()
      .pipe(distinctUntilChanged())
      .subscribe((res) => {
        products = res;
        let product_length = products.length
        let store = this.userFacade.getCurrentStore();
        let free_trial = store.free_trial;
        let trial_end_date
        let trial_number_products
        if (free_trial) {
          trial_end_date = free_trial.end_date;
          trial_number_products = free_trial.allowed_products
        }
        let return_policy_length = store.return_policy.length
        let lisences = store.lisence
        let today_ms = Date.now()
        let one_day_ms = 86400000 // milliseconds per day
        //check for lisence
        if (product_length == 0 && return_policy_length == 0) {
          this.presentToast(
            "set up your return policy first!"
          );
          /*setTimeout(() => {
            this.route.navigate(["/store/return-policy"]);
          }, 1000)*/
        } else {
          let allow_access = false
          if (lisences.length > 0) {
            // Any licence with remaining time and product quota grants access.
            lisences.forEach(lisence => {
              let lisence_end_date = lisence.payed_amount.end_date
              let time_remaining = (lisence_end_date - today_ms) / one_day_ms
              if (product_length < lisence.number_of_products && time_remaining > 0) {
                allow_access = true
              }
            });
          } else {
            // No licence: fall back to the free-trial window and quota.
            let trial_time_remaining = (trial_end_date - today_ms) / one_day_ms
            if (trial_time_remaining > 0 && product_length < trial_number_products) {
              allow_access = true
            }
          }
          if (allow_access) {
            this.segment.emit(data)
          } else {
            this.presentToast(
              "Buy license to add more products"
            );
          }
        }
      });
  }
  // present loading
  async presentLoading() {
    const loading = await this.loadingController.create({
      message: "Please wait...",
      duration: 500,
    });
    await loading.present();
    const { role, data } = await loading.onDidDismiss();
  }
  async presentToast(msg) {
    const toast = await this.toastController.create({
      message: msg,
      duration: 2500,
    });
    toast.present();
  }
  setDesktopProductView(product){
    this.detail_view_product = product
  }
  // Opens the products popover anchored to the triggering event.
  async presentPopover(ev: any) {
    const popover = await this.popoverController.create({
      component: ProductsPopoverComponent,
      cssClass: 'my-custom-class',
      event: ev,
      translucent: true
    });
    console.log(ev)
    return await popover.present();
  }
}
<file_sep>import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { FormsModule } from '@angular/forms';
import { IonicModule } from '@ionic/angular';
import { UserAdditionPageRoutingModule } from './user-addition-routing.module';
import { UserAdditionPage } from './user-addition.page';
// Feature module for the user-addition page: wires its routing module and
// declares the page component.
@NgModule({
  imports: [
    CommonModule,
    FormsModule,
    IonicModule,
    UserAdditionPageRoutingModule
  ],
  declarations: [UserAdditionPage]
})
export class UserAdditionPageModule {}
<file_sep>import { NgModule } from "@angular/core";
import { PreloadAllModules, RouterModule, Routes } from "@angular/router";
import { LayoutPage } from "./layout.page";
// Route table for the store-admin area: every child page is lazy-loaded
// under the "store" path and rendered inside LayoutPage; unmatched paths
// redirect to the dashboard.
const routes: Routes = [
  {
    path: "store",
    component: LayoutPage,
    children: [
      {
        path: "dashboard",
        loadChildren: () =>
          import(
            "src/app/modules/admin-store/pages/dashboard/dashboard.module"
          ).then((m) => m.DashboardPageModule),
      },
      {
        path: "products",
        loadChildren: () =>
          import(
            "src/app/modules/admin-store/pages/products/products.module"
          ).then((m) => m.ProductsPageModule),
      },
      {
        path: "add-products",
        loadChildren: () =>
          import(
            "src/app/modules/admin-store/pages/products/product-addition/product-addition.module"
          ).then((m) => m.ProductAdditionPageModule),
      },
      {
        path: "return-policy",
        loadChildren: () => import('src/app/modules/admin-store/pages/products/return-policy/return-policy.module')
        .then( m => m.ReturnPolicyPageModule)
      },
      {
        path: "sales",
        loadChildren: () =>
          import("src/app/modules/admin-store/pages/sales/sales.module").then(
            (m) => m.SalesPageModule
          ),
      },
      {
        path: "users",
        loadChildren: () =>
          import("src/app/modules/admin-store/pages/users/users.module").then(
            (m) => m.UsersPageModule
          ),
      },
      {
        path: "settings",
        loadChildren: () =>
          import(
            "src/app/modules/admin-store/pages/settings/settings.module"
          ).then((m) => m.SettingsPageModule),
      },
      {
        path: "pricing",
        loadChildren: () =>
          import(
            "src/app/modules/admin-store/pages/pricing/pricing.module"
          ).then((m) => m.PricingPageModule),
      },
      {
        path: "report",
        loadChildren: () =>
          import(
            "src/app/modules/admin-store/pages/reports/reports.module"
          ).then((m) => m.ReportsPageModule),
      },
      {
        // empty child path falls through to the dashboard
        path: "",
        redirectTo: "/admin-store/store/dashboard",
        pathMatch: "full",
      },
    ],
  },
  {
    // top-level empty path also lands on the dashboard
    path: "",
    redirectTo: "/admin-store/store/dashboard",
    pathMatch: "full",
  },
];
// Child routing module for the admin-store layout feature.
@NgModule({
  imports: [RouterModule.forChild(routes)],
  exports: [RouterModule],
})
export class LayoutRoutingModule {}
<file_sep>import { NgModule } from '@angular/core';
import { GmFeedCardComponent } from './gm-feed-card/gm-feed-card';
// Shared UI module exposing the GmFeedCard component for reuse elsewhere.
@NgModule({
  declarations: [GmFeedCardComponent],
  imports: [],
  exports: [GmFeedCardComponent]
})
export class GemionicUiModule {}
<file_sep>import { Component, OnInit } from "@angular/core";
import { ProductFacadeService } from "src/app/core-modules/services/products/product-facade/product-facade.service";
import { CartFacadeService } from "src/app/core-modules/services/cart/cart-facade/cart-facade.service";
import { Router } from "@angular/router";
import { HeaderStateService } from "src/app/core-modules/services/header/header-state/header-state.service";
import { ProductStateService } from "src/app/core-modules/services/products/product-state/product-state.service";
import { ProductApiService } from "src/app/core-modules/services/products/product-api/product-api.service";
// Landing-page widget that shows the top-selling products and lets the
// shopper open a product's detail page, add it to the cart, or jump to the
// full "all products" listing.
@Component({
  selector: "app-top-sales",
  templateUrl: "./top-sales.component.html",
  styleUrls: ["./top-sales.component.scss"],
})
export class TopSalesComponent implements OnInit {
  //// TODO: Pass filtered values
  public products;
  reset = 1;
  //Observale
  public products$; // top-sale products fetched in ngOnInit
  public label;
  public data; // true when the fetch returned no products (empty-state flag)
  constructor(
    private _productFacade: ProductFacadeService,
    private cartFacade: CartFacadeService,
    private router: Router,
    private productState: ProductStateService,
    private headerState: HeaderStateService,
    private productApi: ProductApiService
  ) {}
  ngOnInit() {
    //get top sale products, assign to products
    this.productApi
      .getSegmentProducts({
        value: null,
        signal: "top_sales",
      })
      .subscribe(
        (res) => {
          this.products$ = res.products;
          // BUG FIX: previously checked `res.length`, which is undefined on
          // the `{ products: [...] }` response body, so the empty-state flag
          // was never set.
          if (!res.products || res.products.length == 0) {
            this.data = true;
          } else {
            this.data = false;
          }
        },
        (err) => {
          console.log(err);
        }
      );
  }
  // Navigate to the product-detail page for the tapped product.
  viewDetailPage(product) {
    this.headerState.updateHeaderStatus("product_detail");
    //navigate
    this.router.navigate([`/user/landing/product-detail`, product._id]);
  }
  addToCart(product) {
    this.cartFacade.addToCart(product);
  }
  // "See all" action: stash the list in state and open the all-products page.
  loadTopSalesProducts() {
    localStorage.setItem("all_p", "Top Sales");
    this.productState.setSearchingProducts(this.products$);
    this.productState.setMainProducts(this.products$);
    this.router.navigate(["/landing/all-products"]);
  }
}
<file_sep>import { Component, OnInit } from '@angular/core';
import { SingleDataSet, Label } from 'ng2-charts';
import { ChartType } from 'chart.js';
// Demo polar-area chart with hard-coded sales-channel data (ng2-charts).
@Component({
  selector: 'app-polar-chart',
  templateUrl: './polar-chart.component.html',
  styleUrls: ['./polar-chart.component.scss'],
})
export class PolarChartComponent implements OnInit {
  // PolarArea chart configuration consumed by the template bindings.
  public polarAreaChartLabels: Label[] = ['Download Sales', 'In-Store Sales', 'Mail Sales', 'Telesales', 'Corporate Sales'];
  public polarAreaChartData: SingleDataSet = [300, 500, 100, 40, 120];
  public polarAreaLegend = true;
  public polarAreaChartType: ChartType = 'polarArea';
  // NOTE(review): chartColors must be bound in the template for the palette
  // to take effect — confirm the template passes it to the chart.
  public chartColors: Array<any> = [
    { // all colors in order
      backgroundColor: ['#018786', ' #FFD700', ' #90DED7','#977C0C',' #e0bd00','#017776','#7fc3bd',' #856d0b']
    }
  ]
  ngOnInit() {}
  // chart events: currently only logged for debugging
  public chartClicked({ event, active }: { event: MouseEvent, active: {}[] }): void {
    console.log(event, active);
  }
  public chartHovered({ event, active }: { event: MouseEvent, active: {}[] }): void {
    console.log(event, active);
  }
}
<file_sep>const Validator = require("validator");
const isEmpty = require("is-empty");
module.exports = function validateRegisterInput(data) {
let errors = {};
// Check if data exists, if not declare empty string
data.name = !isEmpty(data.name) ? data.name : "";
data.email = !isEmpty(data.email) ? data.email : "";
data.phone = !isEmpty(data.phone) ? data.phone : "";
// Name Validation
if (Validator.isEmpty(data.name)) {
errors.name = "Name field is required";
}
// Email Validation
if (Validator.isEmpty(data.email)) {
errors.email = "Email field is required";
} else if (!Validator.isEmail(data.email)) {
errors.email = "Email is invalid";
}
// Phone Validation
if (Validator.isEmpty(data.email)) {
errors.phone = "phone field is required";
}
return {errors, isValid: isEmpty(errors)};
}
<file_sep>import { TestBed } from "@angular/core/testing";
import { ReceiptFacadeService } from "./receipt-facade.service";
// Default Angular TestBed smoke test: verifies the service can be injected.
describe("ReceiptFacadeService", () => {
  let service: ReceiptFacadeService;
  beforeEach(() => {
    TestBed.configureTestingModule({});
    service = TestBed.inject(ReceiptFacadeService);
  });
  it("should be created", () => {
    expect(service).toBeTruthy();
  });
});
<file_sep>import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import {UnitsSoldComponent} from './units-sold/units-sold.component';
import {ReportsComponentsModule} from '../reports-components/reports-components.module';
import {DatePipe} from '@angular/common'
// Bundles the sub-report components (currently UnitsSoldComponent) and
// provides DatePipe for date formatting inside them.
@NgModule({
  declarations: [
    UnitsSoldComponent
  ],providers:[DatePipe],
  imports: [
    CommonModule,
    ReportsComponentsModule
  ],exports:[
    UnitsSoldComponent
  ]
})
export class SubReportsModule { }
<file_sep>import { NgModule } from "@angular/core";
import { CommonModule } from "@angular/common";
import { FormsModule, ReactiveFormsModule } from "@angular/forms";
import { IonicModule } from "@ionic/angular";
import { DeliveryVerificationPageRoutingModule } from "./delivery-verification-routing.module";
import { DeliveryVerificationPage } from "./delivery-verification.page";
// Page module for the delivery-verification screen; includes reactive forms
// for the OTP / courier-reference entry.
@NgModule({
  imports: [
    CommonModule,
    FormsModule,
    IonicModule,
    ReactiveFormsModule,
    DeliveryVerificationPageRoutingModule,
  ],
  declarations: [DeliveryVerificationPage],
})
export class DeliveryVerificationPageModule {}
<file_sep>const bcrypt = require("bcryptjs");
const createToken = require("./payload");
// Issues a JWT for `user` after verifying `password`; responds 404 when the
// user record is missing and 400 when the password does not match.
module.exports = function userToken(user, password, res) {
  if (!user) {
    return res.status(404).json({ emailnotfound: "You are not registered!" });
  }
  // Check password
  // NOTE(review): admin accounts bypass the bcrypt comparison entirely —
  // confirm this is intentional and that `user.admin` cannot be set by
  // client-supplied data.
  if (user.admin){
    createToken(user, res)
  }else{
    bcrypt.compare(password, user.password).then(isMatch => {
      if (isMatch) {
        // Create JWT Payload
        createToken(user, res)
      } else {
        return res.status(400).json({ passwordincorrect: "Password is incorrect" });
      }
    }).catch(err => console.log("bcrypt: ",err));
  }
}
<file_sep>const mongoose = require("mongoose");
// Image Schema: stores raw product-image bytes plus their MIME type.
const Schema = mongoose.Schema;
const ImageSchema = new Schema({
  productImage: {
    data: Buffer, // raw image bytes
    contentType: String // MIME type, e.g. "image/png"
  },
  date: {
    type: Date,
    default: Date.now // upload timestamp
  }
});
// Image model
const Image = mongoose.model('images', ImageSchema);
module.exports = Image;
<file_sep>import { Component, Output, OnInit, ViewChild, EventEmitter, } from "@angular/core";
import { FormGroup, FormArray, FormBuilder, Validators } from "@angular/forms";
import { IonSlides } from "@ionic/angular";
import { ToastController } from "@ionic/angular";
import { AlertController } from "@ionic/angular";
import { ProductApiService } from "src/app/core-modules/services/products/product-api/product-api.service";
import { VoucherApiService } from "src/app/core-modules/services/vouchers/voucher-api/voucher-api.service";
import { Router } from "@angular/router";
import { UserFacadeService } from "src/app/core-modules/services/profile/profile-facade/profile-facade.service";
import { VoucherFacadeService } from 'src/app/core-modules/services/vouchers/voucher-facade/voucher-facade.service';
import {BreakpointObserver, Breakpoints} from '@angular/cdk/layout';
// Multi-slide wizard for creating a promotion (sale or voucher) over the
// store's products, then saving it via the voucher API.
@Component({
  selector: "app-runsale",
  templateUrl: "./runsale.component.html",
  styleUrls: ["./runsale.component.scss"],
})
export class RunsaleComponent implements OnInit {
  @ViewChild("addPromoSlider") slides: IonSlides;
  @Output() notifyParent: EventEmitter<any> = new EventEmitter<any>();
  public run_sale: boolean; // true = sale promo, false = voucher promo
  public slideTwo: FormGroup; // title + expiry date
  public slideThree: FormGroup; // promo type + platforms
  public slideFour: FormGroup; // per-product promo settings
  showBadgeSale: boolean = false;
  showBadgeVoucher: boolean = false;
  public productList: FormArray;
  public products: any; // store's products for the selection dropdowns
  public showMe: boolean;
  // on Selection promo_type
  // Visibility flags toggled by promo_typeOnChange for the slide-four fields.
  public discount: boolean;
  public item_limit: boolean;
  public secondary_Product: boolean;
  public buy1get1free: boolean;
  public volume: boolean;
  public productId: string;
  public promo_type: string;
  device_screen; // "Handset" | "Tablet" | "Web" (from BreakpointObserver)
  showBack;
  showNext;
  slides_number = 1; // 1-based index of the active slide
  // returns all form groups under properties
  get productFormGroup() {
    return this.slideFour.get("products") as FormArray;
  }
  constructor(
    private formBuilder: FormBuilder,
    public toastController: ToastController,
    public alertController: AlertController,
    private voucherService: VoucherApiService,
    private voucherFacade: VoucherFacadeService,
    private userFacade: UserFacadeService,
    private productApi: ProductApiService,
    private router: Router,
    breakpointObserver: BreakpointObserver,
  ) {
    // Track the device class so the template can adapt its layout.
    breakpointObserver.observe([
      Breakpoints.Handset
    ]).subscribe(result => {
      if (result.matches) {
        this.device_screen = "Handset"
      }
    });
    breakpointObserver.observe([
      Breakpoints.Tablet
    ]).subscribe(result => {
      if (result.matches) {
        this.device_screen = "Tablet"
      }
    });
    breakpointObserver.observe([
      Breakpoints.Web
    ]).subscribe(result => {
      if (result.matches) {
        this.device_screen = "Web"
      }
    });
  }
  ngOnInit() {
    //get current store
    this.showBack = false;
    this.showNext = true;
    let store = this.userFacade.getCurrentStore();
    this.productApi.getStoreProductsById({ id: store._id }).subscribe(
      (res) => {
        this.products = res.products;
      },
      (err) => {
        console.log(err);
      }
    );
    // slide forms
    // First slide uses segments
    this.slideTwo = this.formBuilder.group({
      title: [""],
      exp_date: [""],
    });
    this.slideThree = this.formBuilder.group({
      type: [""],
      platform: [[]],
    });
    this.slideFour = this.formBuilder.group({
      products: this.formBuilder.array([this.createProduct()]),
    });
    // set productList to the form control containing propeties
    this.productList = this.slideFour.get("products") as FormArray;
  }
  // concatinate all values to one object
  // Collect the slide values into one promo object, persist it, and return
  // to the sales page on success.
  createPromo() {
    let store = this.userFacade.getCurrentStore();
    let promo = {
      run_sale: this.run_sale,
      type: this.slideThree.value.type,
      title: this.slideTwo.value.title,
      products: this.slideFour.value.products,
      total_quota: 15, // NOTE(review): hard-coded quota — confirm intended
      platform: this.slideThree.value.platform,
      exp_date: this.slideTwo.value.exp_date,
    };
    this.voucherService
      .createVoucher({ promo: promo, store_id: store._id })
      .subscribe(
        (res) => {
          this.router.navigate(["/store/sales"]);
        },
        (err) => {
          console.log(err);
        }
      );
  }
  // Sale or Voucher Selection
  segmentChanged(value) {
    let data = value.detail.value;
    if (data == "sale") {
      this.run_sale = true;
    }
    if (data == "voucher") {
      this.run_sale = false;
    }
  }
  changeSecProduct(value) {
    this.productId = value.detail.value;
  }
  // Toggle which slide-four inputs apply for the chosen promo type.
  // NOTE(review): some flags read inverted (e.g. buy1get1free = false for
  // the "buy1get1free" case) — presumably they drive hidden/shown state;
  // confirm against the template bindings.
  promo_typeOnChange(event) {
    this.promo_type = event.detail.value;
    switch (event.detail.value) {
      case "buy1get1free":
        this.buy1get1free = false;
        this.secondary_Product = false;
        this.volume = false;
        break;
      case "combo":
        this.buy1get1free = true;
        this.item_limit = false;
        this.discount = true;
        this.secondary_Product = true;
        this.volume = false;
        break;
      case "volume":
        this.buy1get1free = true;
        this.discount = true;
        this.item_limit = true;
        this.volume = true;
        this.secondary_Product = false;
        break;
      case "itemDiscount":
        this.buy1get1free = true;
        this.discount = true;
        this.item_limit = false;
        this.secondary_Product = false;
        this.volume = false;
        break;
    }
  }
  // Advance after a short delay and confirm "sale" selection with a toast.
  swipeNextSale() {
    setTimeout(() => {
      this.slides.slideNext();
      this.presentSaleToast();
    }, 1000);
  }
  swipeNext() {
    this.slides.slideNext();
  }
  // Advance after a short delay and confirm "voucher" selection with a toast.
  swipeNextVoucher() {
    setTimeout(() => {
      this.slides.slideNext(), this.presentVoucherToast();
    }, 1000);
  }
  next() {
    this.slides.lockSwipes(false);
    this.slides.slideNext();
  }
  slidesBack() {
    this.slides.lockSwipes(false);
    this.slides.slidePrev();
  }
  // Keep the nav buttons and slide counter in sync with the active slide,
  // then re-lock swiping so navigation only happens via the buttons.
  slideChange(event) {
    this.slides.getActiveIndex().then((data) => {
      this.slides_number = data + 1;
      if (data == 3) {
        this.showNext = false;
        this.showBack = true;
      } else if (data == 0) {
        this.showBack = false;
        this.showNext = true;
      } else {
        this.showNext = true;
        this.showBack = true;
      }
    });
    this.slides.lockSwipes(true);
  }
  async presentSaleToast() {
    const toast = await this.toastController.create({
      message: "Sale selected as promo type",
      position: "bottom",
      animated: true,
      duration: 2000,
    });
    toast.present();
    this.showBadgeSale = true;
    this.showBadgeVoucher = false;
    setTimeout(() => {
    }, 2010);
  }
  async presentVoucherToast() {
    const toast = await this.toastController.create({
      message: "Voucher selected as Promo type.",
      position: "bottom",
      animated: true,
      duration: 2000,
    });
    toast.present();
    this.showBadgeVoucher = true;
    this.showBadgeSale = false;
    setTimeout(() => {
    }, 2010);
  }
  /*here we will have to have a way of making sure it run once*/
  // Generate new product
  // Builds one empty per-product promo form group for the FormArray.
  createProduct(): FormGroup {
    return this.formBuilder.group({
      p_id: [""],
      p_quota: [""],
      discount: [""],
      items_exceeding: [""],
      s_id: [""],
    });
  }
  addProduct() {
    this.productList.push(this.createProduct());
    console.log(this.productFormGroup.controls)
  }
  removeProduct(index) {
    this.productList.removeAt(index);
  }
  getProductFormGroup(index): FormGroup {
    const formGroup = this.productList.controls[index] as FormGroup;
    return formGroup;
  }
  onClickBack() {
    this.notifyParent.emit("ad_promo");
    console.log('dff')
  }
}
<file_sep>import { TestBed } from "@angular/core/testing";
import { VoucherStateService } from "./voucher-state.service";
// Default Angular TestBed smoke test: verifies the service can be injected.
describe("VoucherStateService", () => {
  let service: VoucherStateService;
  beforeEach(() => {
    TestBed.configureTestingModule({});
    service = TestBed.inject(VoucherStateService);
  });
  it("should be created", () => {
    expect(service).toBeTruthy();
  });
});
<file_sep>import { Component, OnInit, Input } from "@angular/core";
import { ProductApiService } from "src/app/core-modules/services/products/product-api/product-api.service";
// Fetches and exposes products in the same category as the one being viewed.
@Component({
  selector: "app-similar-products",
  templateUrl: "./similar-products.component.html",
  styleUrls: ["./similar-products.component.scss"],
})
export class SimilarProductsComponent implements OnInit {
  @Input() category: any; // category of the product currently on screen
  public similarProducts$; // resolved product array (not an observable despite the $)
  constructor(private productApi: ProductApiService) {}
  ngOnInit() {
    this.productApi
      .getSegmentProducts({
        value: this.category,
        signal: "similar_products",
      })
      .subscribe(
        (res) => {
          this.similarProducts$ = res.products;
        },
        (err) => {
          console.log(err);
        }
      );
  }
}
<file_sep>import { NgModule } from "@angular/core";
import { CommonModule } from "@angular/common";
import { FormsModule, ReactiveFormsModule } from "@angular/forms";
import { Routes, RouterModule } from "@angular/router";
import { IonicModule } from "@ionic/angular";
import { UpdateInfoPageRoutingModule } from "./update-info-routing.module";
import { UpdateInfoPage } from "./update-info.page";
import {MatExpansionModule} from '@angular/material/expansion';
// Routes the empty path directly to UpdateInfoPage.
const routes: Routes = [
  {
    path: "",
    component: UpdateInfoPage,
  },
];
// Page module for the profile "update info" screen.
// NOTE(review): both UpdateInfoPageRoutingModule and RouterModule.forChild
// register routes here — confirm the duplication is intended.
@NgModule({
  imports: [
    CommonModule,
    FormsModule,
    IonicModule,
    ReactiveFormsModule,
    UpdateInfoPageRoutingModule,
    RouterModule.forChild(routes),
    MatExpansionModule
  ],
  declarations: [UpdateInfoPage],
})
export class UpdateInfoPageModule {}
<file_sep>import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { FormsModule } from '@angular/forms';
import { AllProductsModule } from "./all-products/all-products.module";
import { IonicModule } from '@ionic/angular';
import { SeeAllProductsHomePageRoutingModule } from './see-all-products-home-routing.module';
import { SeeAllProductsHomePage } from './see-all-products-home.page';
// Page module for the "see all products" home screen; pulls in the shared
// AllProductsModule for the listing UI.
@NgModule({
  imports: [
    CommonModule,
    FormsModule,
    IonicModule,
    AllProductsModule ,
    SeeAllProductsHomePageRoutingModule
  ],
  declarations: [SeeAllProductsHomePage]
})
export class SeeAllProductsHomePageModule {}
<file_sep>import { Component, OnInit } from '@angular/core';
import { Router } from "@angular/router";
import { ProductFacadeService } from "src/app/core-modules/services/products/product-facade/product-facade.service";
import { OrderFacadeService } from "src/app/core-modules/services/orders/order-facade/order-facade.service";
// Layout page for the store slides area; on init it preloads the vendor's
// products and the store's orders into their facades.
@Component({
  selector: 'app-slides-layout',
  templateUrl: './slides-layout.page.html',
  styleUrls: ['./slides-layout.page.scss'],
})
export class SlidesLayoutPage implements OnInit {
  constructor(
    private router: Router,
    private productFacade: ProductFacadeService,
    private orderFacade: OrderFacadeService
  ) { }
  ngOnInit() {
    this.productFacade.loadVendorProducts();
    //Load Orders
    this.orderFacade.loadStoreOrders();
  }
}
<file_sep>
module.exports = function verify(data, Model, res, emailToken) {
Model.findOne({ email : data.email }).then(user => {
//Check if user is verified
if(user.verified.token == emailToken){
if(user.verified.isVerified){
res.json({msg: user.email + " is already verified" })
}else{
// Set verified to true
user.verified.isVerified = true;
user.save().then(user => {
res.json({msg: user.email + " is succesfully verified"})
})
}
}else{
res.json({msg: "Your link is old and expired!!"})
}
}).catch(err => console.log(err));
}
<file_sep>const express = require("express");
const jwt = require("jsonwebtoken");
const keys = require("../../../config/users/keys");
const async = require("async");
const router = express.Router();
const authCheck = require("../../../validation/authenticate/checkMiddleware/jwtCheck");
// Load Product model
const Order = require("../../../models/store/Order");
const Receipt = require("../../../models/store/Receipt");
const Store = require("../../../models/store/Store");
const Product = require("../../../models/category/Product");
const Temp_Order = require("../../../models/store/Temp_Order");
const Order_Receipt = require("../../../models/store/Order_Receipt");
const Notification = require("../../../models/users/Notifications");
//delivery
// Courier/OTP delivery confirmation: finds the order by courier reference
// and OTP, marks it fulfilled, and pushes an "Order_review" notification to
// the buyer. Responds with the updated order.
router.post("/update_order_delivery", (req, res) => {
  let data = req.body;
  Order.findOne({
    courier_ref: data.courier_ref,
    courier_OTP: data.courier_OTP,
  })
    .then((order) => {
      // NOTE(review): a lookup miss makes `order.fullfilled` throw; the
      // error is only logged and the request never gets a response —
      // confirm whether a "not found" reply should be added.
      if (!order.fullfilled) {
        order.fullfilled = true;
        order.delivery_status = "Delivered";
        order
          .save()
          .then((new_order) => {
            //push notification
            let today = new Date();
            let msg = `Order ${new_order.order_number} was delivered on ${today}, please review order here`;
            const new_notification = new Notification({
              order: new_order,
              store_id: new_order.storeId,
              type: "Order_review",
              title: "Activity log",
              msg: msg,
              user_email: new_order.user_email,
            });
            new_notification.save();
            //return new_notification todo:
            res.json({ order: new_order });
          })
          .catch((err) => console.log({ err: err, msg: "no notifcation" }));
      } else {
        // Idempotency guard: the order was already confirmed as delivered.
        res.json({ msg: "Sorry already fullfilled", order: order });
      }
    })
    .catch((err) => console.log(err));
});
// store temp order
// Creates an INCOMPLETE temporary order holding the payment-gateway payload
// and the user's cart; it is finalized elsewhere once payment completes.
router.post("/create_order", authCheck, (req, res) => {
  let data = req.body.data;
  let user = req.authData;
  if (data) {
    const order = new Temp_Order({
      user_email: user.email,
      pf_processing: data,
      m_payment_id: data.m_payment_id,
      status: "INCOMPLETE",
      cart: req.body.cart,
    });
    order
      .save()
      .then((order) => {
        res.json({ order: order.order });
      })
      .catch((err) => console.log(err));
  } else {
    // FIX: previously the request was left hanging when no payment data was
    // posted; answer explicitly so clients don't time out.
    res.status(400).json({ msg: "Missing payment data" });
  }
});
//get user order
// Returns every order belonging to the authenticated user's email address.
router.get("/get_user_orders", authCheck, (req, res) => {
  let user = req.authData;
  Order.find({ user_email: user.email })
    .then((orders) => {
      if (orders) {
        res.json({ orders: orders });
      } else {
        res.json({ message: "no order" });
      }
    })
    // FIX: the rejection was silently swallowed (`err => err`); log it like
    // the sibling order routes do.
    .catch((err) => console.log(err));
});
// admin get orders
// Lists all orders for the authenticated admin's store.
router.get("/get_store_orders", authCheck, (req, res) => {
  const { store_id } = req.authData;
  Order.find({ storeId: store_id })
    .then((orders) => {
      if (!orders) {
        console.log("no orders");
        return res.json({ message: "no orders" });
      }
      res.json({ orders });
    })
    .catch((err) => console.log(err));
});
// Lists all orders for an arbitrary store id supplied in the request body.
router.post("/get_store_ordersById", authCheck, (req, res) => {
  const storeId = req.body.id;
  console.log(req.body);
  Order.find({ storeId: storeId })
    .then((orders) => {
      console.log(storeId, orders);
      if (!orders) {
        console.log("no orders");
        return res.json({ message: "no orders" });
      }
      res.json({ orders });
    })
    .catch((err) => console.log(err));
});
// admin Update order
router.post("/update_order_status", authCheck, (req, res) => {
console.log(req.body)
let data = req.body.data;
let store_id = req.body.store_id;
// Generate Random Number for order number
function randomString(length, chars) {
var result = "";
for (var i = length; i > 0; --i)
result += chars[Math.floor(Math.random() * chars.length)];
return result;
}
let chars = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
if (data.commit_date) {
Order.findOne({ _id: data.order_id })
.then((order) => {
order.commit_date = data.commit_date;
order.delivery_ready = false;
order.delivery_status = `Order is being processed`;
order
.save()
.then((new_order) => {
//push notification
let msg = `Order ${new_order.order_number} is being processed and estimated delivery date is: ${data.commit_date}`;
const new_notification = new Notification({
order: new_order,
store_id: store_id,
type: "Order_progress",
title: "Order Progress",
msg: msg,
user_email: new_order.user_email,
});
new_notification.save();
res.json({ order: new_order });
})
.catch((err) => console.log(err));
})
.catch((err) => console.log(err));
} else if (
data.courier_name &&
data.courier_ref &&
data.delivery_type == "Courier"
) {
//update ready for delivery
let otp = randomString(5, "0123456789");
Order.findOne({ _id: data.order_id })
.then((order) => {
order.delivery_ready = true;
order.delivery_status = "Delivery on the Way";
order.delivery_type = data.delivery_type;
order.courier_name = data.courier_name;
order.courier_ref = data.courier_ref;
order.courier_OTP = otp;
order.delivery_date = data.delivery_date;
order
.save()
.then((new_order) => {
//push notification
let msg = `Order ${new_order.order_number} is ready and delivery date is ${data.delivery_date}. On delivery Please present OTP: ${otp} & Ref: ${data.courier_ref} to verify your order`;
const new_notification = new Notification({
order: new_order,
store_id: store_id,
type: "Order_progress",
title: "Order Progress",
msg: msg,
user_email: new_order.user_email,
});
new_notification.save();
res.json({ order: new_order });
})
.catch((err) => console.log(err));
})
.catch((err) => console.log(err));
} else if (data.delivery_type === "Self" && data.self) {
let ref = randomString(7, chars);
let otp = randomString(5, "0123456789");
Order.findOne({ _id: data.order_id })
.then((order) => {
order.delivery_ready = true;
order.delivery_status = "Delivery on the Way";
order.delivery_type = data.delivery_type;
order.courier_ref = ref;
order.courier_OTP = otp;
order.delivery_date = data.delivery_date;
order
.save()
.then((new_order) => {
let msg = `Order ${new_order.order_number} is ready and delivery date is ${data.delivery_date}. On delivery Please present OTP: ${otp} & Ref: ${ref} to verify your order`;
const new_notification = new Notification({
order: new_order,
store_id: store_id,
type: "Order_progress",
title: "Order Progress",
msg: msg,
user_email: new_order.user_email,
});
new_notification.save();
res.json({ order: new_order });
})
.catch((err) => console.log(err));
})
.catch((err) => console.log(err));
} else if (data.return_reason && data.return) {
//user return
Order.updateOne(
{ _id: data.order_id, "items._id": data.product._id },
{
$set: {
"items.$.return": data.return_reason,
return: data.return_reason,
},
},
(err, result) => {
if (err) {
res.status(500).json({ error: "Unable to update competitor." });
} else {
Order.findOne({ _id: data.order_id })
.then((order) => {
//push notification
let msg = `${data.product.productName} on Order ${order.order_number} return to supplier`;
const new_notification = new Notification({
store_id: store_id,
type: "Order_progress",
title: "Order Progress",
msg: msg,
order: order,
user_email: order.user_email,
});
new_notification.save();
res.json({ order: order });
})
.catch((err) => console.log(err));
}
}
);
} else {
//update order status
let product = data.item;
Order.updateOne(
{ _id: data.order_id, "items._id": product._id },
{ $set: { "items.$.order_status": data.product_status } },
(err, result) => {
if (err) {
res.status(500).json({ error: "Unable to update competitor." });
} else {
Order.findOne({ _id: data.order_id })
.then((order) => {
//push notification
let msg = `${product.productName} on Order ${order.order_number} is ${data.product_status}`;
const new_notification = new Notification({
store_id: store_id,
type: "Order_progress",
title: "Order Progress",
msg: msg,
order: order,
user_email: order.user_email,
});
new_notification.save();
res.json({ order: order });
})
.catch((err) => console.log(err));
}
}
);
}
});
// User update on delivery
//admin update delivery status
//PayfastS
router.post("/notify_url", (req, res) => {
//Receive the data posted by PayFast
const pfData = req.body;
function randomString(length, chars) {
var result = "";
for (var i = length; i > 0; --i)
result += chars[Math.floor(Math.random() * chars.length)];
return result;
}
let length = 5;
let chars =
"0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
//Notify PayFast that the information has been received
if (pfData) {
res.status(200).send({ message: "Received, Thank you" });
} else {
res.status(400).send({ message: "No data!" });
}
//Perfom secuity checks
if (
//Verify the source IP address belongs to PayFast todo:
true
//'www.payfast.co.za' || 'sandbox.payfast.co.za' || 'w1w.payfast.co.za' || 'w2w.payfast.co.za' ||
) {
console.log(
"IPPPPPPPPPPP: ",
req.connection.remoteAddress,
req.headers["x-forwarded-for"]
);
//if lisence
if (pfData.item_name == "Lisence" && pfData.payment_status == "COMPLETE") {
Temp_Order.find({ m_payment_id: pfData.m_payment_id })
.then((order) => {
let data = order;
let lisence = data[0].cart[0];
console.log(data[0].pf_processing,pfData)
if (
//Verify the security signature is valid todo:
data[0].pf_processing.item_name == pfData.item_name &&
data[0].pf_processing.item_description == pfData.item_description &&
//Verify the payment amount matches your order amount
data[0].pf_processing.amount == pfData.amount_gross
//Verify the data received is valid
) {
Order.findOne({
m_payment_id: pfData.m_payment_id,
})
.then((oxo) => {
//Verify that the order hasn’t been processed already
if (!oxo) {
Store.find({ _id: lisence.store_id })
.then((store) => {
const receip = new Receipt({
m_payment_id: pfData.m_payment_id,
receipt: pfData,
});
receip.save();
Store.updateOne(
{ _id: lisence.store_id},
{ $push: { lisence: lisence } },
{ safe: true, upsert: true },
function (error, success) {
if (error) {
console.log(error);
} else {
console.log("store modified: ", success);
let receipt = new Order_Receipt({
m_payment_id: pfData.m_payment_id,
order_number: `License-${randomString(8, chars)}`,
payment_status: pfData.payment_status,
user_email: data[0].user_email,
store_id: store[0]._id,
delivery_address: null,
items: data[0].cart,
total_price: pfData.amount_gross,
status: "COMPLETE",
});
receipt
.save()
.then((re) => {
console.log(re);
console.log("receipt saved");
})
.catch((err) => {
console.log(err);
});
}
}
);
})
.catch((err) => err);
} else {
//log error attempt
console.log("Order Already made");
}
})
.catch((err) => console.log(err));
}
})
.catch((err) => err);
} else {
console.log("one");
if (pfData.payment_status == "COMPLETE" && pfData.item_name !== "Lisence") {
// Generate Random Number for order number
//make order
Temp_Order.find({ m_payment_id: pfData.m_payment_id })
.then((order) => {
let data = order[0];
if (
//Verify the security signature is valid todo:
data.pf_processing.item_name == pfData.item_name &&
//Verify the payment amount matches your order amount
data.pf_processing.amount == pfData.amount_gross
//Verify the data received is valid
) {
Order.findOne({
m_payment_id: pfData.m_payment_id,
})
.then((oxo) => {
//Verify that the order hasn’t been processed already
if (!oxo) {
// loop through cart and groupBy storeId
var groupBy = function (xs, key) {
return xs.reduce(function (rv, x) {
(rv[x[key]] = rv[x[key]] || []).push(x);
return rv;
}, {});
};
// Group by Id
let grouped_id = groupBy(data.cart, "_id");
// new cart with merged product
let arr = [];
async.each(grouped_id, function (item, callback) {
if (item.length > 1) {
//merge the products then push to cart
let merged_product = {
amount: item[0].amount + item[1].amount,
category: item[0].category,
original_price: item[0].original_price,
productDescription: item[0].productDescription,
productName: item[0].productName,
productPrice: item[0].productPrice,
sale_type: item[0].sale_type,
storeId: item[0].storeId,
_id: item[0]._id,
};
arr.push(merged_product);
} else {
//push to array
arr.push({ ...item[0] });
}
});
// a loop that returns a new array with new property = order_status
let new_cart = arr.map((product) => {
let proo = { ...product, order_status: "pending" };
return proo;
});
let grouped_order = groupBy(new_cart, "storeId");
// Process order
async.each(grouped_order, function (item, callback) {
if (item) {
const order = new Order({
m_payment_id: pfData.m_payment_id,
order_number: randomString(length, chars),
payment_status: "COMPLETE",
user_email: data.user_email,
storeId: item[0].storeId,
commit_date: null,
items: item,
});
order
.save()
.then((new_order) => {
let sum = 0;
for (let i = 0; i < new_order.items.length; i++) {
sum += parseInt(
new_order.items[i].productPrice *
new_order.items[i].amount
);
} // TODO: check price formula
if (new_order && sum > 0) {
//generate receipt for each order
let receipt = new Order_Receipt({
m_payment_id: new_order.m_payment_id,
order_number: new_order.order_number,
payment_status: new_order.payment_status,
user_email: new_order.user_email,
store_id: new_order.storeId,
delivery_address: null,
items: new_order.items,
total_price: sum,
status: "Cartalist",
});
receipt
.save()
.then((new_r) => {
console.log(new_r);
})
.catch((err) => console.log(err));
}
})
.catch((err) => {
console.log(err);
});
}
});
// Update order to complete
data.status = "COMPLETE";
data.save();
//Save Payfast Receipt
const receipt = new Receipt({
m_payment_id: pfData.m_payment_id,
receipt: pfData,
});
receipt.save();
//Email the buyer confirming payment todo
} else {
console.log("order already exist");
}
})
.catch((err) => console.log(err));
}
})
.catch((err) => console.log(err));
Temp_Order.find({
m_payment_id: pfData.m_payment_id,
})
.then((order) => {
let cart = order[0].cart;
//Update related products
async.each(cart, function (item, callback) {
Product.find({ _id: item._id })
.then((product) => {
let current_stock = product[0].in_stock;
let current_sold = cart[0].amount;
let remaining_stock = current_stock - current_sold;
Product.updateOne(
{ _id: item._id },
{ $set: { sold: current_sold } },
(err, result) => {
if (err) {
console.log({ err: "Unable to update competitor." });
} else {
console.log(result);
}
}
);
if (remaining_stock <= 0) {
Product.updateOne(
{ _id: item._id },
{ $set: { in_stock: 0 } },
(err, result) => {
if (err) {
console.log({ err: "Unable to update competitor." });
} else {
console.log(result);
}
}
);
Product.updateOne(
{ _id: item._id },
{ $set: { production_status: "OutOfStock" } },
(err, result) => {
if (err) {
console.log({ err: "Unable to update competitor." });
} else {
console.log(result);
}
}
);
} else {
Product.updateOne(
{ _id: item._id },
{ $set: { in_stock: remaining_stock } },
(err, result) => {
if (err) {
console.log({ err: "Unable to update competitor." });
} else {
console.log(result);
}
}
);
}
})
.catch((err) => console.log(err));
});
})
.catch((err) => err);
} else {
console.log("order was not completed");
}
}
} else {
//record data
console.log("iP address not matcing");
}
});
module.exports = router;
<file_sep>import { Component, OnInit } from "@angular/core";
import { MenuStateService } from "src/app/core-modules/services/menus/menu-state/menu-state.service";
import { UserFacadeService } from "src/app/core-modules/services/profile/profile-facade/profile-facade.service";
import { distinctUntilChanged } from "rxjs/operators";
import { NavigationEnd, Router } from "@angular/router";
import {BreakpointObserver, Breakpoints} from '@angular/cdk/layout';
import { HeaderStateService } from "src/app/core-modules/services/header/header-state/header-state.service";
import { ProductFacadeService } from "src/app/core-modules/services/products/product-facade/product-facade.service";
@Component({
selector: "app-admin-menu",
templateUrl: "./admin.component.html",
styleUrls: ["./admin.component.scss"],
})
export class AdminComponent implements OnInit {
token;
device_screen
public avatar;
public profile$;
actvated_tab ;
private value_of_desktop_side_menu;
constructor(
public menuState: MenuStateService,
private userFacade: UserFacadeService,
private router: Router,
breakpointObserver: BreakpointObserver,
private headerStateService: HeaderStateService,
private productFacade: ProductFacadeService,
){
breakpointObserver.observe([
Breakpoints.Handset
]).subscribe(result => {
if (result.matches) {
this.device_screen = "Handset"
}
});
breakpointObserver.observe([
Breakpoints.Tablet
]).subscribe(result => {
if (result.matches) {
this.device_screen = "Tablet"
}
});
breakpointObserver.observe([
Breakpoints.Web
]).subscribe(result => {
if (result.matches) {
this.device_screen = "Web"
}
});
router.events.subscribe(
(value)=>{
console.log(value)
if(value instanceof NavigationEnd){
this.changeMenuSideStatus(value.url)
}
}
)
}
ngOnInit() {
this.token = !!localStorage.getItem("token");
this.actvated_tab = this.headerStateService.getDesktopSideMenuState()
this.setDesktopSideMenuStateValue('dashboard')
this.userFacade
this.userFacade
.getUser$()
.pipe(distinctUntilChanged())
.subscribe(
(res) => {
this.profile$ = res;
if (this.profile$.profileImage) {
this.avatar = false;
} else {
this.avatar = true;
}
console.log(this.profile$);
},
(err) => {
console.log(err);
}
);
}
userShow(user) {
// TODO: test method
this.menuState.updateMenuStatus(user);
}
gotoSettings() {
localStorage.setItem("st", this.userFacade.getCurrentStore()._id);
this.router.navigate(["/admin-store/store/settings"]);
}
goMarketPlace() {
this.menuState.updateMenuStatus("user");
this.router.navigate(["/user/landing"]);
}
setDesktopSideMenuStateValue(data){
this.headerStateService.setDesktopSideMenuState(data)
}
changeMenuSideStatus(url){
switch (url) {
case '/admin-store/store/dashboard':
this.headerStateService.getDesktopSideMenuState().subscribe(
(res)=>{
this.value_of_desktop_side_menu = res
},(err)=>{
console.log(err)
}
)
if(this.value_of_desktop_side_menu !=='dashboard'){
this.headerStateService.setDesktopSideMenuState('dashboard')
}
break;
case '/admin-store/store/sales':
this.headerStateService.getDesktopSideMenuState().subscribe(
(res)=>{
this.value_of_desktop_side_menu = res
},(err)=>{
console.log(err)
}
)
this.headerStateService.setDesktopSideMenuState(this.value_of_desktop_side_menu )
break;
case '/admin-store/store/products':
this.headerStateService.getDesktopSideMenuState().subscribe(
(res)=>{
this.value_of_desktop_side_menu = res
},(err)=>{
console.log(err)
}
)
this.headerStateService.setDesktopSideMenuState(this.value_of_desktop_side_menu )
break;
case '/admin-store/store/users':
this.headerStateService.getDesktopSideMenuState().subscribe(
(res)=>{
this.value_of_desktop_side_menu = res
},(err)=>{
console.log(err)
}
)
if(this.value_of_desktop_side_menu !=='users'){
this.headerStateService.setDesktopSideMenuState('users')
}
break;
case '/admin-store/store/settings':
this.headerStateService.getDesktopSideMenuState().subscribe(
(res)=>{
this.value_of_desktop_side_menu = res
},(err)=>{
console.log(err)
}
)
if(this.value_of_desktop_side_menu !=='settings'){
this.headerStateService.setDesktopSideMenuState('settings')
}
break;
case '/admin-store/store/report':
this.headerStateService.getDesktopSideMenuState().subscribe(
(res)=>{
this.value_of_desktop_side_menu = res
},(err)=>{
console.log(err)
}
)
if(this.value_of_desktop_side_menu !=='reports'){
this.headerStateService.setDesktopSideMenuState('reports')
}
break;
default:
break;
}
console.log("function fires")
}
checkSideDesktopMenuStatus(state,tab){
if(state == tab){
return true
}else{
return false
}
}
}
<file_sep>import { Component, OnInit } from "@angular/core";
import { Router } from "@angular/router";
import { UserFacadeService } from "src/app/core-modules/services/profile/profile-facade/profile-facade.service";
import { MenuStateService } from "src/app/core-modules/services/menus/menu-state/menu-state.service";
import { ActivatedRoute } from "@angular/router";
import { VoucherFacadeService } from "src/app/core-modules/services/vouchers/voucher-facade/voucher-facade.service";
import { ProductFacadeService } from "src/app/core-modules/services/products/product-facade/product-facade.service";
import { CategoryFacadeService } from "src/app/core-modules/services/categories/category-facade/category-facade.service";
import {BreakpointObserver, Breakpoints} from '@angular/cdk/layout';
@Component({
selector: "app-landing",
templateUrl: "./landing.page.html",
styleUrls: ["./landing.page.scss"],
})
export class LandingPage implements OnInit {
title = "Afro-Ballerina || User";
landing = {};
name: string;
has_footer;
//observables
profile$;
isUpdating$;
device_screen;
constructor(
private route: ActivatedRoute,
private router: Router,
public menuState: MenuStateService,
private userFacade: UserFacadeService,
private voucherFacade: VoucherFacadeService,
private _productFacade: ProductFacadeService,
private categoryFacade: CategoryFacadeService,
breakpointObserver: BreakpointObserver,
) {
////loading
this.isUpdating$ = this.userFacade.isUpdating$();
breakpointObserver.observe([
Breakpoints.Handset
]).subscribe(result => {
if (result.matches) {
this.device_screen = "Handset"
}
});
breakpointObserver.observe([
Breakpoints.Tablet
]).subscribe(result => {
if (result.matches) {
this.device_screen = "Tablet"
}
});
breakpointObserver.observe([
Breakpoints.Web
]).subscribe(result => {
if (result.matches) {
this.device_screen = "Web"
setTimeout(() => {
this.router.navigate(["/admin-store/store/dashboard"]);
}, 5000);
}
});
}
ngOnInit() {
//load menu
this.menuState.loggedIn();
///**************Market Place ****************//////
this.categoryFacade.loadCategories();
//get User profile
this.userFacade.loadUser();
this.profile$ = this.userFacade.getUser$();
this.voucherFacade.loadVouchers();
let current_route = this.route.snapshot["_routerState"].url;
if (current_route == "/user/landing/product-detail") {
this.has_footer = false;
} else {
this.has_footer = true;
}
}
}
<file_sep>const Validator = require("validator");
const isEmpty = require("is-empty");
module.exports = function validateCategoryInput(data) {
let errors = {};
// Check if data exists, if not declare empty string
data.categoryName = !isEmpty(data.categoryName) ? data.categoryName : "";
data.categoryDescription = !isEmpty(data.categoryDescription) ? data.categoryDescription : "";
data.categoryMetaTag = !isEmpty(data.categoryMetaTag) ? data.categoryMetaTag : "";
if(data.properties){
data.properties = data.properties
}else{
data.properties = ""
}
// Name Validation
if (Validator.isEmpty(data.categoryName)) {
errors.categoryName = "Category Name field is required";
}
// Description Validation
if (Validator.isEmpty(data.categoryDescription)) {
errors.categoryDescription = "Description field is required";
}
// MetaTag Validation
if (Validator.isEmpty(data.categoryMetaTag)) {
errors.categoryMetaTag = "MetaTag field is required";
}
// Property Validation
if (Validator.isEmpty(data.properties)) {
errors.properties = "Properties field is required";
}
}
<file_sep>import { Injectable } from "@angular/core";
import { ReceiptApiService } from "src/app/core-modules/services/shared/receipt/receipt-api/receipt-api.service";
import { ReceiptStateService } from "src/app/core-modules/services/shared/receipt/receipt-state/receipt-state.service";
@Injectable({
providedIn: "root",
})
export class ReceiptFacadeService {
constructor(
private receiptApi: ReceiptApiService,
private receiptState: ReceiptStateService
) {}
isUpdating$() {
return this.receiptState.isUpdating$();
}
getReceipts$() {
return this.receiptState.getReceipts$();
}
loadUserReceipts() {
this.receiptState.setUpdating(true);
this.receiptApi.getUserReceipts().subscribe(
//// TODO: make it generic
(res) => {
let receipts = res.receipts;
this.receiptState.setReceipt(receipts);
},
(err) => {
console.log(err);
},
() => this.receiptState.setUpdating(false)
);
}
loadStoreReceipts() {
this.receiptState.setUpdating(true);
this.receiptApi.getStoreReceipts().subscribe(
//// TODO: make it generic
(res) => {
let receipts = res.receipts;
this.receiptState.setReceipt(receipts);
},
(err) => {
console.log(err);
},
() => this.receiptState.setUpdating(false)
);
}
loadStoreReceiptsById(id) {
this.receiptState.setUpdating(true);
this.receiptApi.getStoreReceiptsById(id).subscribe(
//// TODO: make it generic
(res) => {
let receipts = res.receipts;
this.receiptState.setReceipt(receipts);
},
(err) => {
console.log(err);
},
() => this.receiptState.setUpdating(false)
);
}
// update receipt optimistic way
updateReceipt(data) {
this.receiptState.setUpdating(true);
//update receipt state
this.receiptState.updateReceipt(data);
//update database receipt item status
this.receiptApi.updateReceipt(data).subscribe(
(res) => {
console.log("item updated");
},
(err) => {
//we need to rollback
console.log(err);
},
() => this.receiptState.setUpdating(false)
);
}
}
<file_sep>import { Injectable } from "@angular/core";
import { Router } from "@angular/router";
import { ToastController } from "@ionic/angular";
import { HeaderStateService } from "../../../utils/header-state/header-state.service";
import { ProductApiService } from "../product-api/product-api.service";
import { ProductOperatorService } from "../product-operator/product-operator.service";
import { ProductStateService } from "../product-state/product-state.service";
@Injectable({
providedIn: "root",
})
export class ProductControllerService {
constructor(
private productApi: ProductApiService,
private productState: ProductStateService,
private productOperator: ProductOperatorService,
public toastController: ToastController,
private headerState: HeaderStateService,
private router: Router
) {}
subscribeLoading$() {
return this.productState.subscribeLoading();
}
/********************DETAIL PRODUCT PAGE *********************/
//prepareObsvForDetailProduct
async prepareDetailState(params, product) {
//similar products
await this.loadSimilarProducts(params);
await this.loadDatabaseProduct(product._id);
//set states
this.headerState.updateHeaderStatus("product_detail");
await this.productState.setDetailProduct(product);
return;
}
async loadDetailProduct(product) {
console.log(product);
// activate loading signal
await this.productState.updateLoading(true);
//configure
let params = this.productOperator.getSimilarProductsParams(product);
await this.prepareDetailState(params, product);
await this.productOperator.updateLocalStorageDetailProduct(product);
await this.productState.setStackProducts(product);
//deactivate loading signal
await this.productState.updateLoading(false);
//detail page ready to be viewed
return this.productState.getDetailProduct();
}
async loadDetailProductById(_id) {
// activate loading signal
this.productState.updateLoading(true);
// Configure product
await this.productApi.getProduct({ id: _id }).subscribe(
(res) => {
let res_product = res.product[0];
let product = this.productOperator.localStorageProductConfig(
res_product
);
let params = this.productOperator.getSimilarProductsParams(product);
this.prepareDetailState(params, product);
this.productOperator.updateLocalStorageDetailProduct(product);
this.productState.setStackProducts(product);
//deactivate loading signal
this.productState.updateLoading(false);
//return from state
return this.productState.getDetailProduct();
},
(err) => {
console.log(err);
}
);
}
getDetailProduct() {
return this.productState.getDetailProduct();
}
reloadPreviousItem() {
let product = this.productState.reloadPreviousItem();
this.loadDetailProduct(product);
}
removePreviousItem() {
this.productState.removePreviousItem();
}
getStackProducts() {
return this.productState.getStackProducts();
}
loadDatabaseProduct(id) {
//activate loading signal
this.productState.updateLoading(true);
this.productApi.getProduct({ id: id }).subscribe((res) => {
//deactivate loading signal
this.productState.updateLoading(false);
return this.productState.setDataBaseProduct(res.product);
});
}
getDataBaseProduct() {
return this.productState.getDataBaseProduct();
}
loadSimilarProducts(data) {
//activate loading signal
this.productState.updateLoading(true);
//configure
this.productApi.getSegmentProducts(data).subscribe(
(res) => {
this.productState.setSimilarProducts(res.products);
//deactivate loading signal
this.productState.updateLoading(false);
},
(err) => {
console.log(err);
}
);
}
getSimilarProducts() {
return this.productState.getSimilarProducts();
}
updateLike(product) {
//update database then update a list
if (!!localStorage.getItem("token")) {
this.productApi.updateLike({ _id: product._id }).subscribe(
(res) => {
product.likes = res.product.likes;
this.productState.setDetailProduct(product);
},
(err) => {
console.log(err);
}
);
} else {
this.router.navigate(["/user/login"]);
}
}
/********************END DETAIL PRODUCT PAGE *********************/
/********************VENDOR PRODUCTS PAGE *********************/
getStoreProducts$() {
return this.productState.getStoreProducts();
}
loadMarketStoreProducts(id) {
this.productApi.getStoreProductsById({ id: id }).subscribe(
(res) => {
let products = res.products;
this.productState.setStoreProducts(products);
this.productState.setOptimumStoreProducts(products);
},
(err) => {
console.log(err);
}
);
}
sortVendorProducts(property: string, order: string) {
this.productState.getOptimumStoreProducts().subscribe(
(products) => {
let sorted_products;
if (order == "asc") {
sorted_products = products.sort((a, b) =>
a[property] > b[property]
? 1
: a[property] === b[property]
? a[property] > b[property]
? 1
: -1
: -1
);
} else {
sorted_products = products.sort((a, b) =>
a[property] < b[property]
? 1
: a[property] === b[property]
? a[property] < b[property]
? 1
: -1
: -1
);
}
return this.productState.setStoreProducts(sorted_products);
},
(err) => {
console.log(err);
}
);
}
onMarketSearch(val) {
this.productState.getSearchingProducts().subscribe(
(res) => {
if (val == "") {
//if nothing to search, set main state to original products
return this.productState.setMainProducts(res);
} else {
//if keywords exist to search
let searched = res.filter((item: any) => {
return (
item.productName.toLowerCase().indexOf(val.toLowerCase()) > -1
);
});
return this.productState.setMainProducts(searched);
}
},
(err) => {
console.log(err);
}
);
}
/********************END VENDOR PRODUCTS PAGE *********************/
/********************PROFILE PAGE *********************/
updateReview(product) {
//update database then update a list
this.productApi.updateReview(product).subscribe(
(res) => {
if (res.product) {
this.presentToast("Product review added");
this.productState.setCurrentProduct([res.product]);
} else {
//toast message
console.log(res);
this.presentToast(res.msg);
}
},
(err) => {
console.log(err);
}
);
}
setCurrentProduct(product) {
return this.productState.setCurrentProduct(product);
}
getCurrentProduct() {
return this.productState.getCurrentProduct();
}
loadCurrentProduct(_id) {
this.productApi.getProduct({ id: _id }).subscribe(
(res) => {
return this.productState.setCurrentProduct(res.product);
},
(err) => {
console.log(err);
}
);
}
loadUserReviews() {
//update database then update a list
this.productApi.getUserReviews().subscribe(
(res) => {
this.productState.setUserReviews(res.products_reviews);
},
(err) => {
console.log(err);
}
);
}
getUserReviews() {
return this.productState.getUserReviews();
}
loadUserlikedProducts() {
//update database then update a list
this.productApi.getUserlikedProducts().subscribe(
(res) => {
this.productState.setUserLikedProducts(res.products);
},
(err) => {
console.log(err);
}
);
}
getUserlikedProducts() {
return this.productState.getUserLikedProducts();
}
/********************END PROFILE PAGE *********************/
/********************All PRODUCTS PAGE *********************/
//load all products
loadProducts() {
this.productApi.getProducts().subscribe(
(res) => {
let products = res.products;
//check vouchers
this.productState.setProducts(products);
},
(err) => {
console.log(err);
}
);
}
getProducts$() {
return this.productState.getProducts$();
}
setMainProducts(data) {
return this.productState.setMainProducts(data);
}
setSearchingProducts(data) {
return this.productState.setSearchingProducts(data);
}
getMainProducts() {
return this.productState.getMainProducts();
}
loadSegmentProducts(val, signal) {
/* load category for main list component */
let value;
if (signal == "category") {
value = val;
} else {
value = null;
}
this.productApi
.getSegmentProducts({ value: value, signal: signal })
.subscribe(
(res) => {
let products = res.products;
console.log(products);
this.productState.setSegmentsProducts(products, signal);
},
(err) => {
console.log(err);
}
);
}
getSegmentProducts(data) {
return this.productApi.getSegmentProducts(data);
}
sortProducts(property: string, order: string) {
this.productState.getSearchingProducts().subscribe(
(products) => {
let sorted_products;
if (order == "asc") {
sorted_products = products.sort((a, b) =>
a[property] > b[property]
? 1
: a[property] === b[property]
? a[property] > b[property]
? 1
: -1
: -1
);
} else {
sorted_products = products.sort((a, b) =>
a[property] < b[property]
? 1
: a[property] === b[property]
? a[property] < b[property]
? 1
: -1
: -1
);
}
console.log("sorted: ", sorted_products);
return this.productState.setMainProducts(sorted_products);
},
(err) => {
console.log(err);
}
);
}
//Filtering products through
filteringProducts(
property: string,
sub_property: string,
value: string,
all_p
) {
if (all_p) {
this.productState.getProducts$().subscribe(
(products) => {
let searched;
searched = products.filter((item) => {
return item[property] == value;
});
if (searched.length > 0) {
return this.productState.setMainProducts(searched);
} else {
searched = products.filter((item) => {
return item[property][sub_property] == value;
});
if (searched.length > 0) {
return this.productState.setMainProducts(searched);
} else {
return this.productState.setMainProducts([]);
}
}
},
(err) => {
console.log(err);
}
);
} else {
this.productState.getSearchingProducts().subscribe(
(products) => {
let searched;
searched = products.filter((item) => {
return item[property] == value;
});
if (searched.length > 0) {
return this.productState.setMainProducts(searched);
} else {
searched = products.filter((item) => {
return item[property][sub_property] == value;
});
if (searched.length > 0) {
return this.productState.setMainProducts(searched);
} else {
return this.productState.setMainProducts([]);
}
}
},
(err) => {
console.log(err);
}
);
}
}
/********************END All PRODUCTS PAGE *********************/
async presentToast(msg) {
const toast = await this.toastController.create({
color: "secondary",
message: msg,
duration: 2500,
});
toast.present();
}
}
<file_sep>import { Component, OnInit } from "@angular/core";
import { ProductStateService } from "src/app/core-modules/services/products/product-state/product-state.service";
import { Router } from "@angular/router";
@Component({
selector: "app-promo-slides",
templateUrl: "./promo-slides.component.html",
styleUrls: ["./promo-slides.component.scss"],
})
export class PromoSlidesComponent implements OnInit {
products$;
public slides = [
"<ion-card>hgh</ion-card","tyeyte",
]
constructor(
private productState: ProductStateService,
private router: Router
) {}
ngOnInit() {}
loadLikedProducts() {
this.products$.subscribe((res) => {
this.productState.setSearchingProducts(res);
this.productState.setMainProducts(res);
this.router.navigate(["/landing/all-products"]);
});
}
}
<file_sep>const Validator = require("validator");
const isEmpty = require("is-empty");
module.exports = function validateProfileUpdateInput(data) {
let errors = {};
// Check if data exists, if not declare empty string
data.first_name = !isEmpty(data.first_name) ? data.first_name : "";
data.last_name = !isEmpty(data.last_name) ? data.last_name : "";
data.address = !isEmpty(data.address) ? data.address : "";
if (Validator.isEmpty(data.first_name)) {
errors.first_name = "first_name is required";
}
if (Validator.isEmpty(data.user_type)) {
errors.last_name = "last_name is required";
}
if (Validator.isEmpty(data.address)) {
errors.address = "address is required";
}
//Satinize input noise
//password
data.first_name = Validator.trim(data.first_name);
data.first_name = Validator.escape(data.first_name);
data.last_name = Validator.trim(data.last_name);
data.last_name = Validator.escape(data.last_name);
data.address = Validator.trim(data.address);
data.address = Validator.escape(data.address);
return { errors, isValid: isEmpty(errors) };
};
<file_sep>export var single = [
{
"name": "Marketing",
"value": 8940000
},
{
"name": "Sales",
"value": 5000000
}
]; | 1005b710b58fdd936dc900a4a346d20bd890b8bb | [
"JavaScript",
"TypeScript",
"HTML"
] | 200 | TypeScript | petoriT/desktop-2 | 70ae46c2dd98ebffca53968c020e4eaf6abc20fa | da802d782e112bac67f86333cd7387168f0520e2 |
refs/heads/master | <file_sep>var infowindow;
// Drop a blue marker for one office on the map and wire a click handler
// that shows an InfoWindow with the office details.
// NOTE(review): `url` inside the click handler is assigned without var
// (implicit global) and never used here — looks like leftover navigation
// code; confirm before removing.
function addMarker(map, lat, long, name, id, imagen, ciudad, direccion) {
var myLatlng = new google.maps.LatLng(lat, long);
var marker = new google.maps.Marker({
position: myLatlng,
map: map,
title: name,
icon: "http://maps.google.com/mapfiles/ms/icons/blue-dot.png"
});
google.maps.event.addListener(marker, "click", function() {
url = "detalles.html?id=" + id;
// Close the previously opened window (module-level `infowindow`) so only
// one InfoWindow is visible at a time.
if (infowindow)
infowindow.close();
infowindow = new google.maps.InfoWindow({
content: '<p><b>' + name + '</b></p>' +
'<p>' + direccion + '</p>' +
'<p>' + ciudad + ' </p>' +
'<img src="' + imagen + '.JPG">'
});
infowindow.open(map, marker);
});
}
/**
 * Download the offices CSV and add one map marker per office row.
 *
 * The CSV stores coordinates without a decimal separator, so a "." is
 * inserted after the 2nd character of the latitude (e.g. "28123456" ->
 * "28.123456") and after the 3rd character of the longitude, which carries
 * a leading sign (e.g. "-16123456" -> "-16.123456").
 *
 * Column layout assumed from the code: 0=id, 1=name, 2=city, 3=address,
 * 4=lat, 5=long, 6=image — TODO confirm against the published dataset.
 *
 * @param {google.maps.Map} map - destination map for the markers.
 */
function csvToJson(map) {
Papa.parse("http://opendata.taro.ull.es/datos/dataset/e77fd886-a0fd-42cc-80fe-a544379e340b/resource/d25ac836-17c6-421b-b0e0-2ef3c68beec8/download/oficinas.csv", {
download: true,
complete: function(results) {
var rows = results['data'];
// Start at 1 to skip the header row; stop before the last row, which is
// the empty entry Papa produces for the file's trailing newline.
for (var i = 1; i < rows.length - 1; i++) {
var row = rows[i];
// Insert the implicit decimal point (see function comment). slice-based
// insertion replaces the original split("")/rebuild loops; all variables
// are now properly var-scoped instead of implicit globals.
var lat = row[4].slice(0, 2) + "." + row[4].slice(2);
var long = row[5].slice(0, 3) + "." + row[5].slice(3);
var imagen = row[6];
var name = row[1];
var id = row[0];
var ciudad = row[2];
var direccion = row[3];
addMarker(map, lat, long, name, id, imagen, ciudad, direccion);
}
}
});
}
// Entry point wired to the window 'load' event: builds the map, centers it
// on the user's position when geolocation is available, and plots the
// offices from the remote CSV.
function load() {
// NOTE(review): no 'center' option here — until geolocation resolves (or
// if the user denies it) the map may render without a defined center;
// confirm whether a default center is wanted.
var mapOptions = {
zoom: 11
};
var map = new google.maps.Map(document.getElementById('map-canvas'), mapOptions);
var pos;
if(navigator.geolocation) {
// Asynchronous: the marker/centering runs whenever the browser delivers
// the position, possibly after csvToJson(map) below has already started.
navigator.geolocation.getCurrentPosition(function(position) {
pos = new google.maps.LatLng(position.coords.latitude, position.coords.longitude);
var marker = new google.maps.Marker({
position: pos,
map: map,
title: '¡Estás aquí!',
icon: "http://maps.google.com/mapfiles/ms/icons/blue-pushpin.png"
});
map.setCenter(pos);
});
}
else {
// The browser does not support geolocation: fall back to a fixed
// Tenerife-area center on a second map instance.
var centerLatlng2 = new google.maps.LatLng(28.268052, -16.623890);
var mapOptions2 = {
zoom: 10,
center: centerLatlng2
}
var map2 = new google.maps.Map(document.getElementById('map-canvas'), mapOptions2);
csvToJson(map2);
// NOTE(review): execution falls through to csvToJson(map) below as well,
// so in this branch the CSV is fetched twice (once per map instance).
}
csvToJson(map);
}
// Legacy geolocation error hook: alerts a debug marker ('Uno'/'Dos') and
// builds an error string that is currently unused by any caller shown here.
function handleNoGeolocation(errorFlag) {
    var content;
    if (errorFlag) {
        alert('Uno');
        content = 'Error: The Geolocation service failed.';
    } else {
        alert('Dos');
        content = 'Error: Your browser doesn\'t support geolocation.';
    }
}
google.maps.event.addDomListener(window, 'load', load);<file_sep>
// Read a query-string parameter from the current page URL.
// Returns "" when the parameter is absent; '+' is decoded as a space.
function getParameterByName(name) {
    // Escape [ and ] so the name can be embedded in the pattern below.
    var escaped = name.replace(/[\[]/, "\\[").replace(/[\]]/, "\\]");
    var matcher = new RegExp("[\\?&]" + escaped + "=([^&#]*)");
    var found = matcher.exec(location.search);
    if (found === null) {
        return "";
    }
    return decodeURIComponent(found[1].replace(/\+/g, " "));
}
// Render one offer's details into the #detalles container.
// (tipo is accepted but not rendered, matching the original call sites.)
function printDetails(nombre, descripcion, tipo, condiciones, validez, imagen) {
    var pieces = [
        "<h3>" + nombre + "</h3> ",
        descripcion + "<br/>",
        "<b>" + condiciones + "</b>" + "<br>",
        validez + "<br>",
        "<img class='img img-responsive' src='" + imagen + ".JPG'>"
    ];
    // Kept as an assignment to the pre-existing implicit global `html`,
    // exactly like the original code.
    html = pieces.join("");
    $("#detalles").append(html);
}
// Offer-details page: downloads the CSV, finds the row whose id column
// matches the ?id= query parameter, and renders it via printDetails().
function csvToJson() {
Papa.parse("http://opendata.taro.ull.es/datos/dataset/e77fd886-a0fd-42cc-80fe-a544379e340b/resource/d25ac836-17c6-421b-b0e0-2ef3c68beec8/download/oficinas.csv", {
download: true,
complete: function(results) {
var id = getParameterByName('id');
// NOTE(review): `i` and the fields below are implicit globals (no var).
for (i = 0; i < results['data'].length; i++) {
// Loose == tolerates the query-string id being a string. Column 13 is
// presumed to be the row id — TODO confirm against the dataset columns.
if (results['data'][i][13] == id) {
nombre = results['data'][i][1];
descripcion = results['data'][i][2];
tipo = results['data'][i][4];
condiciones = results['data'][i][5];
validez = results['data'][i][9];
imagen = results['data'][i][10];
printDetails(nombre, descripcion, tipo, condiciones, validez, imagen);
}
}
}
});
}
$(document).ready ( function(){
csvToJson();
});<file_sep>LUNO
====
### Aplicación para <NAME> - Máster en Ingeniería Informática
### Universidad de La Laguna, Tenerife
##### Dirección y gestión de proyectos TIC
-----------
# Instalación
* Instalar phonegap
* Navegar al fichero del proyecto /carnetjoven
* Ejecutar (en consola o cmd): 'phonegap build android'
* Se compilará un .apk en \platforms\android\ant-build
* Para añadir iOS, consultar la documentación de Phonegap. 'phonegap platform add ios' 'phonegap build ios'
* Para modificar los ficheros de la página web, simplemente se han de tocar los html o js de /www/
# Documentación, ficheros y rutas
Estas son las rutas de los CSV y el HTML dinámico que hay que retocar para re-compilar la aplicación
* Url oficinas.csv /www/js/detalle.js línea 23
* Url ofertas.csv /www/js/mapa.js línea 32
* Url ofertas.csv /www/empresas.html línea 77
* Url Información estática /www/info.html línea 62 <iframe src=X>
Cualquier duda, se pueden poner en contacto con cualquiera de los miembros del repositorio, o a través del profesor.
| 59d74abaa33cac43c9f100e02e5ac2af7238fbb6 | [
"JavaScript",
"Markdown"
] | 3 | JavaScript | Rulox/LUNO | 96fe8a631ee445b83c23ccdc4de5eeac061e77a3 | f028a13430f2420550704ccd87d0d93797333d88 |
refs/heads/master | <repo_name>PeterMond/Vulerable-XSS-Application<file_sep>/VulnerableXSS/Controllers/XSSController.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.Rendering;
using VulnerableXSS.Models;
using System.Text.Encodings.Web;
namespace VulnerableXSS.Controllers
{
/// <summary>
/// Demo controller whose pages pair an XSS-vulnerable rendering of user
/// input with a fixed (encoded) rendering, for several output contexts.
/// The repeated GET/POST bodies are deduplicated into private helpers;
/// every public action keeps its original signature and behavior.
/// </summary>
public class XSSController : Controller
{
    public IActionResult Index()
    {
        return View();
    }

    [HttpGet]
    public IActionResult SimpleReflected()
    {
        return View(NewBlankModel());
    }

    [HttpPost]
    public IActionResult SimpleReflected(XssBasicFields model)
    {
        return View(WithDefaults(model));
    }

    [HttpGet]
    public IActionResult JavaScriptContext()
    {
        return View(NewBlankModel());
    }

    [HttpPost]
    public IActionResult JavaScriptContext(XssBasicFields model)
    {
        return View(WithDefaults(model));
    }

    [HttpGet]
    public IActionResult HtmlAttributeContext()
    {
        return View(NewBlankModel());
    }

    [HttpPost]
    public IActionResult HtmlAttributeContext(XssBasicFields model)
    {
        return View(WithDefaults(model));
    }

    /// <summary>Model with single-space placeholders, as the views expect.</summary>
    private static XssBasicFields NewBlankModel()
    {
        return new XssBasicFields
        {
            NameVulnerable = " ",
            NameFixed = " "
        };
    }

    /// <summary>Replace null fields with the " " placeholder (posted forms may omit them).</summary>
    private static XssBasicFields WithDefaults(XssBasicFields model)
    {
        model.NameFixed = model.NameFixed ?? " ";
        model.NameVulnerable = model.NameVulnerable ?? " ";
        return model;
    }
}
}<file_sep>/VulnerableXSS/Models/XssBasicFields.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace VulnerableXSS.Models
{
// View model for the XSS demo pages: the same user-supplied value is
// rendered twice, once unencoded (vulnerable) and once encoded (fixed).
public class XssBasicFields
{
// Rendered without output encoding to demonstrate the attack.
public string NameVulnerable { get; set; }
// Rendered with proper encoding to demonstrate the mitigation.
public string NameFixed { get; set; }
}
}
<file_sep>/README.md
# Vulnerable XSS Application
## Purpose
This is an ASP.NET MVC application I made in order to learn more about XSS and how to prevent it. The application has pages that contain an example that is vulnerable to XSS and one that is fixed with encoding. There is also additional documentation on what is actually going on in each example to ensure that the fix is clear.
## Running the Application
The application was developed using ASP.NET Core 3. If you choose to run it outside of Docker, then go to https://dotnet.microsoft.com and get the latest version of .NET Core for your platform. The application is cross platform so it will work on Windows, Linux, MAC OS X, and any other platform supported by Microsoft. The code builds successfully under both Visual Studio 2017 and 2019 and can be run both under IIS. When you run it using the key combination ctrl+F5, the application will open in your default browser.
## Payloads
| Page/Context | Payload | Why it Works | Quick Fix |
|--------------|---------|--------------|-----------|
| XSS in basic HTML context | <script>alert(1)</script> | The data is reflected right back on the page in an HTML context without being encoded | HTML encoding before the page is returned |
| XSS in JavaScript Context | "; alert(1); " | The data is directly put into a JavaScript variable. You can use the quotes and semicolon to end that command and execute your own arbitrary JavaScript after that. | JavaScript string encode the data before it is placed into the DOM |
| XSS in HTML attribute context | " onmouseover="alert(1) | The data is put into a value parameter in an HTML tag without being encoded. adding a quote allows you to end the value attribute and add an even handler and run JavaScript | HTML encoding before the page is returned | | 1904a8b4fa3b2157d6835ccb8165df63aff954bf | [
"Markdown",
"C#"
] | 3 | C# | PeterMond/Vulerable-XSS-Application | 49382c77b95fe6cb694c5e8a669ef70eed7f372c | f2437145dcde92c8368d84a2d945a5b038766237 |
refs/heads/master | <file_sep>def introduce
line = "----------------------------"
me = {}
puts "お名前"
me[:name] = gets.chomp
puts "年齢"
me[:old] = gets.chomp
puts "一言"
me[:word] = gets.chomp
puts "名前:#{me[:name]}"
puts line
puts "年齢:#{me[:old]}"
puts line
puts "一言:\n#{me[:word]}"
end
while true do
puts "自己紹介する?"
puts "yes→1,no→2"
input = gets.to_i
if input == 1
introduce
exit
elsif input == 2
exit
else
puts "1か2かでお願いします"
end
end | 782e5ba68a9c7f371fb5fa08fd13733aec5acddb | [
"Ruby"
] | 1 | Ruby | harchica/IntroduceRuby | 54ee093b46acdf8706dcb887b855ce16a1bef70e | dacae5c356d08aa491ba8445d2295a0a26d9c92a |
refs/heads/master | <file_sep>package ir.piana.dev.core.api.func;
import ir.piana.dev.core.api.helper.TransactionHelper;
import rx.functions.Action1;
/**
* @author <NAME> (<EMAIL>)
* Date: 7/11/2019 3:44 PM
**/
@FunctionalInterface
public interface ReadOnlyAction1<T, E extends Throwable> extends Action1<T> {
void executeReadOnly(T t) throws E;
@Override
default void call(T t) {
TransactionHelper.getBean().executeReadOnly(() -> executeReadOnly(t));
}
}
<file_sep>package ir.piana.dev.core.api.repository;
import ir.piana.dev.core.api.entity.PianaEntity;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.repository.NoRepositoryBean;
import org.springframework.data.repository.PagingAndSortingRepository;
/**
* @author <NAME> (<EMAIL>)
* Date: 7/11/2019 5:16 PM
**/
@NoRepositoryBean
public interface PianaEntityRepository<T extends PianaEntity, ID>
extends PagingAndSortingRepository<T, ID>, JpaRepository<T, ID> {
}
<file_sep>package ir.piana.dev.core.api.swagger;
/**
* @author <NAME> (<EMAIL>)
* Date: 6/16/2019 5:53 PM
**/
public interface BaseApiClient {
String getBasePath();
String getServicePath();
String getServiceHost();
Integer getServicePort();
BaseApiClient setBasePath(String basePath);
}
<file_sep>package ir.piana.dev.test.api.impl;
import io.reactivex.Completable;
import io.reactivex.Single;
import ir.piana.dev.test.server.api.dto.PostDto;
import ir.piana.dev.test.server.api.dto.ResponseDto;
import ir.piana.dev.test.server.api.service.TestApi;
import ir.piana.dev.user.client.api.service.UserApi;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
/**
* @author <NAME> (<EMAIL>)
* Date: 6/18/2019 2:21 PM
**/
@Component
public class TestApiImpl implements TestApi {
@Autowired
private UserApi userApi;
@Override
public Completable getTest() {
return Completable.fromAction(() -> {
userApi.sayHello().subscribe(System.out::println);
});
}
@Override
public Single<ResponseDto> postTest(PostDto argument) {
ResponseDto responseDto = new ResponseDto();
responseDto.setEmail("hey");
return Single.just(responseDto);
}
}
<file_sep>jar {
baseName = 'digi-football'
archivesBaseName = baseName
manifest.attributes title: 'Digi Footall Core'
}<file_sep>package ir.piana.dev.core.api.exception;
import java.util.Map;
/**
* @author <NAME> (<EMAIL>)
* Date: 7/10/2019 2:08 PM
**/
public class IllegalArgumentException extends BaseRuntimeException {
public IllegalArgumentException(String messageCode) {
super(messageCode);
}
public IllegalArgumentException(String messageCode, Map<String, Object> params) {
super(messageCode, params);
}
public IllegalArgumentException(Throwable cause, String messageCode) {
super(cause, messageCode);
}
public IllegalArgumentException(Throwable cause, String messageCode, Map<String, Object> params) {
super(cause, messageCode, params);
}
}
<file_sep>package ir.piana.dev.core.api.helper;
import ir.piana.dev.core.api.exception.ApiBaseException;
import ir.piana.dev.core.api.exception.ApiRuntimeBaseException;
import ir.piana.dev.core.api.exception.BaseRuntimeException;
import ir.piana.dev.core.api.exception.TransactionException;
import ir.piana.dev.core.api.func.Consumer;
import ir.piana.dev.core.api.func.Function;
import ir.piana.dev.core.api.func.Runnable;
import ir.piana.dev.core.api.func.Supplier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.support.TransactionTemplate;
import javax.annotation.PostConstruct;
import static org.springframework.transaction.annotation.Propagation.REQUIRES_NEW;
/**
* @author <NAME> (<EMAIL>)
* Date: 7/11/2019 3:52 PM
**/
@Component
public class TransactionHelper {
private final static Logger logger = LoggerFactory.getLogger(TransactionHelper.class);
private static final String ACTION_ERROR_CODE = "com.bitex.common.action";
private static final String ACTION_ERROR_MESSAGE = "Error occurred in execution of the action";
private static final String FALLBACK_ERROR_CODE = "com.bitex.common.fallback";
private static final String FALLBACK_ERROR_MESSAGE = "Error occurred in handling failure";
private static TransactionHelper BEAN;
@Autowired
private PlatformTransactionManager transactionManager;
public static TransactionHelper getBean() {
return BEAN;
}
@PostConstruct
private void setBean() {
TransactionHelper.BEAN = this;
}
public <E extends Throwable> void executeInTransaction(Runnable<E> action) {
executeInTransaction(REQUIRES_NEW, action);
}
public <R, E extends Throwable> R executeInTransaction(Supplier<R, E> action) {
return executeInTransaction(REQUIRES_NEW, action);
}
public <E extends Throwable> void executeInTransaction(Propagation propagation, Runnable<E> action) {
doWithTransaction(propagation, toFunction(action));
}
public <R, E extends Throwable> R executeInTransaction(Propagation propagation, Supplier<R, E> action) {
return doWithTransaction(propagation, toFunction(action));
}
public <E1 extends Throwable, E2 extends Throwable> void executeInTransaction(Runnable<E1> action, Consumer<Throwable, E2> fallback) {
executeInTransaction(REQUIRES_NEW, action, REQUIRES_NEW, fallback);
}
public <R, E1 extends Throwable, E2 extends Throwable> R executeInTransaction(Runnable<E1> action, Function<Throwable, R, E2> fallback) {
return executeInTransaction(REQUIRES_NEW, action, REQUIRES_NEW, fallback);
}
public <R, E1 extends Throwable, E2 extends Throwable> R executeInTransaction(Supplier<R, E1> action, Consumer<Throwable, E2> fallback) {
return executeInTransaction(REQUIRES_NEW, action, REQUIRES_NEW, fallback);
}
public <E1 extends Throwable, E2 extends Throwable> Object executeInTransaction(Supplier<?, E1> action, Function<Throwable, ?, E2> fallback) {
return executeInTransaction(REQUIRES_NEW, action, REQUIRES_NEW, fallback);
}
public <R, E1 extends Throwable, E2 extends Throwable> R executeInTransaction(Function<TransactionStatus, R, E1> action, Consumer<Throwable, E2> fallback) {
return executeInTransaction(REQUIRES_NEW, action, REQUIRES_NEW, fallback);
}
public <E1 extends Throwable, E2 extends Throwable> Object executeInTransaction(Function<TransactionStatus, ?, E1> action, Function<Throwable, ?, E2> fallback) {
return executeInTransaction(REQUIRES_NEW, action, REQUIRES_NEW, fallback);
}
public <E1 extends Throwable, E2 extends Throwable> void executeInTransaction(Propagation actionPropagation, Runnable<E1> action, Propagation fallbackPropagation, Consumer<Throwable, E2> fallback) {
executeInTransaction(actionPropagation, toFunction(action), fallbackPropagation, toFunction(fallback));
}
@SuppressWarnings("unchecked")
public <R, E1 extends Throwable, E2 extends Throwable> R executeInTransaction(Propagation actionPropagation, Runnable<E1> action, Propagation fallbackPropagation, Function<Throwable, R, E2> fallback) {
return (R) executeInTransaction(actionPropagation, toFunction(action), fallbackPropagation, fallback);
}
@SuppressWarnings("unchecked")
public <R, E1 extends Throwable, E2 extends Throwable> R executeInTransaction(Propagation actionPropagation, Supplier<R, E1> action, Propagation fallbackPropagation, Consumer<Throwable, E2> fallback) {
return (R) executeInTransaction(actionPropagation, action, fallbackPropagation, toFunction(fallback));
}
public <E1 extends Throwable, E2 extends Throwable> Object executeInTransaction(Propagation actionPropagation, Supplier<?, E1> action, Propagation fallbackPropagation, Function<Throwable, ?, E2> fallback) {
return executeInTransaction(actionPropagation, toFunction(action), fallbackPropagation, fallback);
}
@SuppressWarnings("unchecked")
public <R, E1 extends Throwable, E2 extends Throwable> R executeInTransaction(Propagation actionPropagation, Function<TransactionStatus, R, E1> action, Propagation fallbackPropagation, Consumer<Throwable, E2> fallback) {
return (R) executeInTransaction(actionPropagation, action, fallbackPropagation, toFunction(fallback));
}
public <E1 extends Throwable, E2 extends Throwable> Object executeInTransaction(Propagation actionPropagation, Function<TransactionStatus, ?, E1> action, Propagation fallbackPropagation, Function<Throwable, ?, E2> fallback) {
final Object actionResult = executeAction(actionPropagation, action);
if (!(actionResult instanceof Throwable)) return actionResult;
final Object fallbackResult = executeFallback(fallbackPropagation, fallback, (Throwable) actionResult);
if (fallbackResult instanceof Throwable) {
logger.error(FALLBACK_ERROR_MESSAGE, (Throwable) fallbackResult);
propagateException((Throwable) fallbackResult, FALLBACK_ERROR_CODE);
}
return fallbackResult;
}
public <R, E extends Throwable> R doWithTransaction(Function<TransactionStatus, R, E> action) {
return doWithTransaction(REQUIRES_NEW, action);
}
@SuppressWarnings("unchecked")
public <R, E extends Throwable> R doWithTransaction(
Propagation propagation, Function<TransactionStatus, R, E> action) {
final Object actionResult = executeAction(propagation, action);
if (actionResult instanceof Throwable) {
logger.error(ACTION_ERROR_MESSAGE, (Throwable) actionResult);
propagateException((Throwable) actionResult, ACTION_ERROR_CODE);
}
return (R) actionResult;
}
public <E extends Throwable> void executeReadOnly(Runnable<E> action) {
executeReadOnly(toSupplier(action));
}
@SuppressWarnings("unchecked")
public <R, E extends Throwable> R executeReadOnly(Supplier<R, E> action) {
final Object actionResult = readOnlyAction(action);
if (actionResult instanceof Throwable) {
logger.error(ACTION_ERROR_MESSAGE, actionResult);
propagateException((Throwable) actionResult, ACTION_ERROR_CODE);
}
return (R) actionResult;
}
public <E1 extends Throwable, E2 extends Throwable> void executeReadOnly(Runnable<E1> action, Consumer<Throwable, E2> fallback) {
executeReadOnly(toSupplier(action), toFunction(fallback));
}
@SuppressWarnings("unchecked")
public <R, E1 extends Throwable, E2 extends Throwable> R executeReadOnly(Runnable<E1> action, Function<Throwable, R, E2> fallback) {
return (R) executeReadOnly(toSupplier(action), fallback);
}
@SuppressWarnings("unchecked")
public <R, E1 extends Throwable, E2 extends Throwable> R executeReadOnly(Supplier<R, E1> action, Consumer<Throwable, E2> fallback) {
return (R) executeReadOnly(action, toFunction(fallback));
}
@SuppressWarnings("unchecked")
public <E1 extends Throwable, E2 extends Throwable> Object executeReadOnly(Supplier<?, E1> action, Function<Throwable, ?, E2> fallback) {
final Object actionResult = readOnlyAction(action);
if (!(actionResult instanceof Throwable)) return actionResult;
final Object fallbackResult = readOnlyFallback(fallback, (Throwable) actionResult);
if (fallbackResult instanceof Throwable) {
logger.error(FALLBACK_ERROR_MESSAGE, (Throwable) fallbackResult);
propagateException((Throwable) fallbackResult, FALLBACK_ERROR_CODE);
}
return fallbackResult;
}
private <E extends Throwable> Object executeAction(
Propagation propagation, Function<TransactionStatus, ?, E> action) {
final TransactionTemplate transactionTemplate = new TransactionTemplate(
transactionManager);
transactionTemplate.setPropagationBehavior(propagation.value());
return transactionTemplate.execute(transaction ->
safeExecute(transaction, action, transaction));
}
private <E extends Throwable> Object executeFallback(Propagation propagation, Function<Throwable, ?, E> fallback, Throwable cause) {
final TransactionTemplate transactionTemplate = new TransactionTemplate(transactionManager);
transactionTemplate.setPropagationBehavior(propagation.value());
return transactionTemplate.execute(transaction -> safeExecute(transaction, fallback, cause));
}
private <T, E extends Throwable> Object safeExecute(
TransactionStatus transaction,
Function<T, ?, E> function,
T functionArgument) {
try {
final Object result = function.apply(functionArgument);
transaction.flush();
return result;
} catch (Throwable t) {
transaction.setRollbackOnly();
return t;
}
}
private <E extends Throwable> Object readOnlyAction(Supplier<?, E> action) {
final TransactionTemplate transactionTemplate = new TransactionTemplate(transactionManager);
transactionTemplate.setPropagationBehavior(REQUIRES_NEW.value());
transactionTemplate.setReadOnly(true);
return transactionTemplate.execute(transaction -> {
try {
return action.get();
} catch (Throwable t) {
return t;
} finally {
transaction.setRollbackOnly();
}
});
}
private <E extends Throwable> Object readOnlyFallback(Function<Throwable, ?, E> fallback, Throwable cause) {
final TransactionTemplate transactionTemplate = new TransactionTemplate(transactionManager);
transactionTemplate.setPropagationBehavior(REQUIRES_NEW.value());
transactionTemplate.setReadOnly(true);
return transactionTemplate.execute(transaction -> {
try {
return fallback.apply(cause);
} catch (Throwable t) {
return t;
} finally {
transaction.setRollbackOnly();
}
});
}
private void propagateException(Throwable cause, String defaultErrorCode) {
if (cause instanceof BaseRuntimeException) {
throw (BaseRuntimeException) cause;
}
if (cause instanceof ApiBaseException) {
final ApiBaseException remoteException = (ApiBaseException) cause;
throw new ApiRuntimeBaseException(remoteException, remoteException.getMessageCode());
}
if (cause instanceof ApiBaseException) {
final ApiBaseException baseException = (ApiBaseException) cause;
throw new TransactionException(baseException, baseException.getMessageCode(), baseException.getParams());
}
throw new TransactionException(cause, defaultErrorCode);
}
private <E extends Throwable> Supplier<?, E> toSupplier(Runnable<E> runnable) {
return () -> {
runnable.run();
return null;
};
}
private <E extends Throwable> Function<TransactionStatus, ?, E> toFunction(Runnable<E> runnable) {
return argument -> {
runnable.run();
return null;
};
}
private <R, E extends Throwable> Function<TransactionStatus, R, E> toFunction(Supplier<R, E> supplier) {
return argument -> supplier.get();
}
private <T, E extends Throwable> Function<T, ?, E> toFunction(Consumer<T, E> consumer) {
return argument -> {
consumer.accept(argument);
return null;
};
}
}
<file_sep>rootProject.name = 'gradle-digi-football'
include 'core'
include 'user'
include 'test'<file_sep>DROP TABLE IF EXISTS users;
CREATE TABLE users(
id INT PRIMARY KEY AUTO_INCREMENT,
email VARCHAR (50) UNIQUE NOT NULL,
password VARCHAR (64) NOT NULL,
verified INT NOT NULL default 0,
version INT NOT NULL,
created_on TIMESTAMP NOT NULL,
last_login TIMESTAMP
);
<file_sep>package ir.piana.dev.core.api.func;
import ir.piana.dev.core.api.helper.TransactionHelper;
import java.util.concurrent.Callable;
/**
* @author <NAME> (<EMAIL>)
* Date: 7/11/2019 3:44 PM
**/
@FunctionalInterface
public interface TransactionalFunc0<R, E extends Throwable>
extends Callable<R>, TransactionalFunc<R> {
R executeInTransaction() throws E;
@Override
default R call() {
return TransactionHelper.getBean()
.doWithTransaction(transaction ->
checkError(executeInTransaction(), transaction));
}
}
<file_sep>package ir.piana.dev.core.api.swagger;
import com.netflix.hystrix.exception.HystrixRuntimeException;
import io.vertx.core.AbstractVerticle;
import io.vertx.core.eventbus.DeliveryOptions;
import io.vertx.core.eventbus.Message;
import io.vertx.core.json.JsonObject;
import ir.piana.dev.core.api.dto.ErrorDto;
import ir.piana.dev.core.api.exception.ApiBaseException;
import ir.piana.dev.core.api.exception.AssertionException;
import ir.piana.dev.core.api.exception.ResourceNotFoundException;
import ir.piana.dev.core.api.exception.TransactionException;
import ir.piana.dev.core.api.helper.ExceptionHelper;
import ir.piana.dev.core.vertx.json.Json;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import java.util.LinkedHashMap;
import java.util.Optional;
import static com.github.phiz71.vertx.swagger.router.SwaggerRouter.CUSTOM_STATUS_CODE_HEADER_KEY;
import static org.springframework.core.NestedExceptionUtils.getRootCause;
/**
* @author <NAME> (<EMAIL>)
* Date: 6/16/2019 5:52 PM
**/
public abstract class BaseApiVerticle extends AbstractVerticle {
private static final Logger logger = LoggerFactory.getLogger(BaseApiVerticle.class);
protected static final String INTERNAL_ERROR_STATUS_CODE = "500";
protected static final String NOT_FOUND_STATUS_CODE = "404";
protected static final String BAD_REQUEST_STATUS_CODE = "400";
@Autowired
private ExceptionHelper exceptionHelper;
protected final void registerConsumer(
String httpMethod, String serviceId, Callback service) {
final String method = httpMethod.toUpperCase();
vertx.eventBus().<JsonObject>consumer(serviceId).handler(message -> {
logger.debug(String.format("'%s %s' is called", method, serviceId));
final Long startTime = System.currentTimeMillis();
try {
service.execute(message, serviceId, method, startTime);
} catch (Throwable t) {
handleException(message, serviceId, method, t, startTime);
}
});
}
protected final void logCompletion(
String serviceId, String httpMethod, Long startTime) {
final Long executionTime = System.currentTimeMillis() - startTime;
logger.info(String.format("The '%s %s' is executed successfully in '%d' millisecond",
httpMethod, serviceId, executionTime));
}
protected final void handleException(
Message<JsonObject> message,
String serviceId, String httpMethod,
Throwable userCause, Long startTime) {
userCause = (userCause instanceof HystrixRuntimeException) ? userCause.getCause() : userCause;
/*
** do action later
*/
final ErrorDto errorDto = new ErrorDto(
exceptionHelper.getErrorCode(userCause),
exceptionHelper.getErrorMessage(userCause),
exceptionHelper.getErrorParams(userCause));
// final ErrorDto errorDto = new ErrorDto("500", "internal server error", new LinkedHashMap());
final Throwable mainCause = Optional.ofNullable(getRootCause(userCause)).orElse(userCause);
final Long executionTime = System.currentTimeMillis() - startTime;
logger.error(String.format("Error occurred during execution of '%s %s' in '%d' millisecond: ",
httpMethod, serviceId, executionTime), mainCause);
final DeliveryOptions options = new DeliveryOptions();
options.addHeader(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON_VALUE);
String statusCode = INTERNAL_ERROR_STATUS_CODE;
if(userCause instanceof TransactionException) {
statusCode = INTERNAL_ERROR_STATUS_CODE;
} else if (userCause instanceof IllegalArgumentException ||
userCause instanceof AssertionException) {
statusCode = BAD_REQUEST_STATUS_CODE;
} else if (userCause instanceof ResourceNotFoundException) {
statusCode = NOT_FOUND_STATUS_CODE;
} else if (userCause instanceof ApiBaseException) {
statusCode = ((ApiBaseException) userCause).getStatusCode().toString();
}
options.addHeader(CUSTOM_STATUS_CODE_HEADER_KEY, statusCode);
message.reply(Json.encode(errorDto), options);
}
@FunctionalInterface
protected interface Callback {
void execute(
Message<JsonObject> message,
String serviceId, String httpMethod, Long startTime)
throws Exception;
}
}
<file_sep>package ir.piana.dev.core.api.func;
import io.reactivex.functions.Function;
import ir.piana.dev.core.api.helper.TransactionHelper;
/**
* @author <NAME> (<EMAIL>)
* Date: 7/11/2019 3:44 PM
**/
@FunctionalInterface
public interface ReadOnlyFunc1<T, R, E extends Throwable> extends Function<T, R> {
R executeReadOnly(T t) throws E;
@Override
default R apply(T t) {
return TransactionHelper.getBean().executeReadOnly(() -> executeReadOnly(t));
}
}
<file_sep>buildscript {
ext {
springBootVersion = '2.1.6.RELEASE'
springCloudVersion = 'Greenwich.RELEASE'
// vertxVersion = '3.5.3'
vertxVersion = '3.7.1'
h2Version = '1.4.196'
jedisVersion = '2.9.0'
eurekaClientVersion = '1.9.2'
apacheCommonsVersion = '3.4'
icuVersion = '59.1'
swaggerPluginVersion = '2.12.0'
swaggerAnnotationsVersion = '1.5.20'
swaggerCodeGeneratorVersion = '2.3.1'
vertxSwaggerVersion = '1.6.0'
sonarqubeVersion = '2.6.2'
embeddedRedisVersion = '0.6'
logstashLogbackEncoderVersion = '5.2'
logbackClassicVersion = '1.2.3'
lombokVersion = '1.18.4'
querydslVersion = '4.2.1'
testContainersVersion = '1.8.0'
cucumberVersion = '4.0.0'
oltuVersion = "1.0.1"
retrofitVersion = "2.3.0"
junitVersion = "5.+"
jsonFireVersion = "1.8.0"
picocontainerVersion = "2.15"
mockitoVersion = "2.+"
hazelcastVersion = "3.10.6"
javaJwtVersion = "3.4.+"
mapstructVersion= "1.3.0.Final"
twilioVersion="7.32.0"
awaitility="3.1.6"
}
repositories {
mavenCentral()
maven { url 'https://plugins.gradle.org/m2' }
}
dependencies {
classpath "org.springframework.boot:spring-boot-gradle-plugin:${springBootVersion}"
classpath "gradle.plugin.org.hidetake:gradle-swagger-generator-plugin:${swaggerPluginVersion}"
classpath "org.sonarsource.scanner.gradle:sonarqube-gradle-plugin:${sonarqubeVersion}"
classpath "net.ltgt.gradle:gradle-apt-plugin:0.19"
}
}
apply plugin: "org.sonarqube"
apply from: file('gradle/heroku/clean.gradle')
allprojects {
apply plugin: 'java'
}
subprojects {
    // Project group and version
    group 'ir.piana.dev'
    version '1.0-SNAPSHOT'
    // Keep MapStruct-generated mappers reproducible (no timestamp comment).
    tasks.withType(JavaCompile) {
        options.compilerArgs = [
                '-Amapstruct.suppressGeneratorTimestamp=true'
        ]
    }
    // General Plugins
    apply plugin: 'jacoco'
    apply plugin: 'io.spring.dependency-management'
    apply plugin: 'net.ltgt.apt'
    // General configurations such as java version, charset, and etc
    def javaVersion = '1.8'
    def defaultEncoding = 'UTF-8'
    sourceCompatibility = javaVersion
    targetCompatibility = javaVersion
    tasks.withType(JavaCompile) { options.encoding = defaultEncoding }
    tasks.withType(Javadoc) { options.encoding = defaultEncoding }
    // Annotation-processor output (MapStruct, QueryDSL, Lombok) joins the source sets.
    sourceSets.main.java.srcDir "${buildDir}/generated/source/apt/main"
    sourceSets.test.java.srcDir "${buildDir}/generated/source/apt/test"
    repositories {
        mavenCentral()
        maven { url 'https://plugins.gradle.org/m2' }
    }
    // Artifacts
    jar {
        manifest.attributes provider: 'gradle'
        manifest.attributes version: version
    }
    task sourcesJar(type: Jar, dependsOn: classes) {
        classifier = 'sources'
        from sourceSets.main.allSource
    }
    task javadocJar(type: Jar, dependsOn: javadoc) {
        classifier = 'javadoc'
        from javadoc.destinationDir
    }
    artifacts {
        archives sourcesJar
        // archives javadocJar // Uncomment this to generate javadoc whenever you want.
    }
    test {
        // maxParallelForks = 4
    }
    // Spring Boot / Spring Cloud BOMs pin the transitive dependency versions.
    dependencyManagement {
        dependencies {
            // dependency 'io.reactivex:rxjava:1.3.8'
        }
        imports {
            mavenBom "org.springframework.boot:spring-boot-dependencies:${springBootVersion}"
            mavenBom "org.springframework.cloud:spring-cloud-dependencies:${springCloudVersion}"
        }
    }
    // The gateway module gets its own (security/webflux/zuul) dependency set and
    // then opts out of the shared service configuration below via the early return.
    if (it.name == 'web-server') {
        dependencies {
            compile 'org.springframework.boot:spring-boot-starter-security'
            compile 'org.springframework.boot:spring-boot-starter-webflux'
            compile 'org.springframework.cloud:spring-cloud-starter-netflix-zuul'
            testCompile 'org.springframework.boot:spring-boot-starter-test'
            testCompile 'io.projectreactor:reactor-test'
            testCompile 'org.springframework.security:spring-security-test'
        }
        apply plugin: 'org.springframework.boot'
        bootJar {
            classifier = 'boot'
        }
        // Collect the bootable jar into the root build/libs for deployment staging.
        task copyToLib(type: Copy, group: 'build') {
            from "$buildDir/libs"
            into "$rootProject.buildDir/libs"
        }
        copyToLib {
            dependsOn 'bootJar'
        }
        task stage(group: 'build', dependsOn: 'copyToLib')
        build {
            dependsOn stage
        }
        return
    }
    // Shared dependency set for core and all service modules.
    dependencies {
        compileOnly "org.springframework.boot:spring-boot-configuration-processor"
        compile 'org.springframework:spring-web'
        compile 'org.springframework.boot:spring-boot-starter-mail'
        compile 'com.github.ulisesbocchio:jasypt-spring-boot-starter:2.1.1'
        compile "org.springframework.boot:spring-boot-starter-actuator"
        compile "org.springframework.boot:spring-boot-starter-aop"
        compile "org.springframework.boot:spring-boot-starter-validation"
        compile "org.springframework.boot:spring-boot-starter-data-jpa"
        compile "org.springframework.boot:spring-boot-starter-data-redis"
        compile "org.springframework.boot:spring-boot-starter-batch"
        compile "org.springframework.boot:spring-boot-starter-logging"
        compile "org.springframework.boot:spring-boot-starter-cache"
        compile "org.springframework.cloud:spring-cloud-starter-netflix-hystrix"
        compile "org.springframework.cloud:spring-cloud-starter-bus-kafka"
        compile "org.springframework.boot:spring-boot-starter-freemarker"
        compile "org.hibernate:hibernate-envers"
        compile "io.vertx:vertx-core:${vertxVersion}"
        compile "io.vertx:vertx-web:${vertxVersion}"
        compile "io.vertx:vertx-web-client:${vertxVersion}"
        compile("io.vertx:vertx-junit5:${vertxVersion}") {
            exclude group: 'org.junit.jupiter'
        }
        testCompile group: 'junit', name: 'junit', version: '4.12'
        compile "redis.clients:jedis:${jedisVersion}"
        compile "com.h2database:h2:${h2Version}"
        compile "org.apache.commons:commons-lang3:${apacheCommonsVersion}"
        compile "com.fasterxml.jackson.datatype:jackson-datatype-joda"
        compile "com.ibm.icu:icu4j:${icuVersion}"
        compile "net.logstash.logback:logstash-logback-encoder:${logstashLogbackEncoderVersion}"
        compile "ch.qos.logback:logback-classic:${logbackClassicVersion}"
        compile "org.projectlombok:lombok:${lombokVersion}"
        compile("io.swagger:swagger-codegen:${swaggerCodeGeneratorVersion}") {
            exclude group: 'org.slf4j'
            exclude group: 'com.google.code.gson'
        }
        compile "com.github.phiz71:vertx-swagger-router:${vertxSwaggerVersion}"
        compile fileTree(dir: "${rootProject.projectDir}/libs", include: "*.jar")
        compile "com.querydsl:querydsl-jpa:${querydslVersion}"
        compile "com.querydsl:querydsl-apt:${querydslVersion}"
        compile "com.hazelcast:hazelcast-spring:${hazelcastVersion}"
        compile "org.mapstruct:mapstruct:${mapstructVersion}"
        compile "org.mapstruct:mapstruct-jdk8:${mapstructVersion}"
        compile group: 'com.google.guava', name: 'guava', version: '28.0-jre'
        // https://mvnrepository.com/artifact/com.github.akarnokd/rxjava2-interop
        compile group: 'com.github.akarnokd', name: 'rxjava2-interop', version: '0.13.7'
        // https://mvnrepository.com/artifact/postgresql/postgresql
        // compile group: 'postgresql', name: 'postgresql', version: '9.1-901-1.jdbc4'
        // https://mvnrepository.com/artifact/org.postgresql/postgresql
        compile group: 'org.postgresql', name: 'postgresql', version: '42.2.6'
        // Annotation processors: MapStruct mappers and Lombok boilerplate.
        annotationProcessor("org.mapstruct:mapstruct-processor:${mapstructVersion}")
        annotationProcessor("org.projectlombok:lombok:${lombokVersion}")
    }
    test {
        useJUnitPlatform()
    }
    // 'core' and 'web-server' stop here; everything below applies only to the
    // service modules (user, test, ...).
    if (it.name == 'core') return
    if (it.name == 'web-server') return
    dependencies {
        compile project(':core')
    }
    apply plugin: 'org.springframework.boot'
    jar {
        enabled = true
    }
    bootJar {
        classifier = 'boot'
    }
    // Generate the module's server API from its swagger definition.
    apply plugin: 'org.hidetake.swagger.generator'
    dependencies {
        compile "org.springframework.cloud:spring-cloud-starter-config"
        swaggerCodegen project(':core')
        swaggerCodegen("io.swagger:swagger-codegen-cli:${swaggerCodeGeneratorVersion}") {
            exclude group: 'ch.qos.logback'
            exclude group: 'org.slf4j'
        }
    }
    ext.packageName = toPackageName(project.name)
    swaggerSources {
        "${packageName}Server" {
            inputFile = file("src/main/resources/static/${packageName}/swagger/api.yaml")
            code {
                language = 'ir.piana.dev.core.generator.swagger.SpringVertXCodegen'
                outputDir = file("${buildDir}/generated/swagger/server")
                templateDir = file("${rootProject.projectDir}/core/src/main/resources/swagger/core/template/JavaVertXServer")
                additionalProperties = [
                        'rxInterface' : 'true',
                        'dateLibrary' : 'legacy',
                        'apiPackage' : "ir.piana.dev.${packageName}.server.api.service",
                        'modelPackage': "ir.piana.dev.${packageName}.server.api.dto",
                ]
            }
        }
    }
    swaggerSources."${packageName}Server".code.dependsOn ':core:build'
    compileJava.dependsOn swaggerSources."${packageName}Server".code
    sourceSets.main.java.srcDir "${swaggerSources."${packageName}Server".code.outputDir}/src/main/java"
    sourceSets.main.resources.srcDir "${swaggerSources."${packageName}Server".code.outputDir}/src/main/resources"
    // Helper a module can call (see the test module) to generate RxJava2/Vert.x
    // client stubs for other modules' swagger definitions.
    ext.buildClients = { modules ->
        modules.each { module ->
            ext.packageName = toPackageName(module)
            swaggerSources {
                "${packageName}Client" {
                    inputFile = file("${rootProject.projectDir}/${module}/src/main/resources/static/${packageName}/swagger/api.yaml")
                    code {
                        language = 'java'
                        outputDir = file("${buildDir}/generated/swagger/client/${module}")
                        templateDir = file("$rootProject.projectDir/core/src/main/resources/swagger/core/template/Java/libraries/vertx")
                        additionalProperties = [
                                'library' : 'vertx',
                                'dateLibrary' : 'legacy',
                                'useRxJava2' : 'true',
                                'apiPackage' : "ir.piana.dev.${packageName}.client.api.service",
                                'modelPackage': "ir.piana.dev.${packageName}.client.api.dto",
                        ]
                    }
                }
            }
            swaggerSources."${packageName}Client".code.dependsOn ':core:build'
            compileJava.dependsOn swaggerSources."${packageName}Client".code
            sourceSets.main.java.srcDir "${swaggerSources."${packageName}Client".code.outputDir}/src/main/java"
        }
    }
    configurations {
        mapstruct
        querydslapt
    }
    // Runs the QueryDSL (and Lombok) annotation processors in a -proc:only pass,
    // emitting the Q-types under build/generated/querydsl.
    task generateQueryDSL(type: JavaCompile, group: 'build', description: 'Generates the QueryDSL query types') {
        source = sourceSets.main.java
        classpath = configurations.compile + configurations.querydslapt
        options.compilerArgs = [
                '-proc:only',
                '-processor', 'com.querydsl.apt.jpa.JPAAnnotationProcessor' +
                ',lombok.launch.AnnotationProcessorHider$AnnotationProcessor'
        ]
        destinationDir = file("${buildDir}/generated/querydsl/")
    }
    sourceSets.main.java.srcDir "${buildDir}/generated/querydsl/"
    compileJava {
        dependsOn generateQueryDSL
    }
    // QueryDSL needs the swagger-generated sources to exist first.
    generateQueryDSL.dependsOn {
        tasks.findAll { task -> task.name.startsWith('generateSwaggerCode') }
    }
    // Collect the bootable jar into the root build/libs for deployment staging.
    task copyToLib(type: Copy, group: 'build') {
        from "$buildDir/libs"
        into "$rootProject.buildDir/libs"
    }
    copyToLib {
        dependsOn 'bootJar'
    }
    task stage(group: 'build', dependsOn: 'copyToLib')
    build {
        dependsOn stage
    }
    // task stage(dependsOn: ['clean', 'build'])
    // stage.dependsOn {
    //     copyToLib
    // }
    // apply from: file("$rootProject.projectDir/gradle/heroku/stage.gradle")
}
// Keep generated sources (swagger, QueryDSL, APT) out of SonarQube analysis.
sonarqube {
    properties {
        property "sonar.exclusions", "**/build/generated/**"
    }
}
// Normalises a project/module name into a java-package-friendly token:
// lower-cased, with underscores and dashes removed.
static def toPackageName(String value) {
    return value.toLowerCase().replaceAll("_", "").replaceAll("-", "")
}<file_sep>package ir.piana.dev.core.api.exception;
import java.util.Map;
/**
* @author <NAME> (<EMAIL>)
* Date: 7/10/2019 2:15 PM
**/
public class AssertionException extends BaseRuntimeException {
    /** Creates an assertion failure identified only by a message code. */
    public AssertionException(String messageCode) {
        super(messageCode);
    }
    /** Creates an assertion failure with a message code and message parameters. */
    public AssertionException(String messageCode, Map<String, Object> params) {
        super(messageCode, params);
    }
    /** Creates an assertion failure wrapping an underlying cause. */
    public AssertionException(Throwable cause, String messageCode) {
        super(cause, messageCode);
    }
    /** Creates an assertion failure wrapping a cause, with message parameters. */
    public AssertionException(Throwable cause, String messageCode, Map<String, Object> params) {
        super(cause, messageCode, params);
    }
}
<file_sep>package ir.piana.dev.user.business.operation;
import io.reactivex.*;
import ir.piana.dev.core.api.exception.IllegalArgumentException;
import ir.piana.dev.user.business.data.entity.UserEntity;
import ir.piana.dev.user.business.data.service.UserService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Profile;
import org.springframework.stereotype.Component;
import java.util.*;
/**
* @author <NAME> (<EMAIL>)
* Date: 6/18/2019 3:19 PM
**/
@Component
@Profile("develop")
public class UserOperationDevelop implements UserOperation {
    private Logger logger = LoggerFactory.getLogger(UserOperationDevelop.class);
    @Value("${piana.profile-name}")
    private String pName;
    @Value("${piana.email.send}")
    private boolean sendMail;
    @Autowired
    private EmailOperation emailOperation;
    @Autowired
    private UserService userService;
    private Random random = new Random();
    @Value("${piana.email.link.prefix}")
    private String linkPrefix;
    // Pending verification UUIDs keyed by e-mail address.
    // NOTE(review): a plain LinkedHashMap is not thread-safe; confirm this bean
    // is only touched from a single thread, or switch to ConcurrentHashMap.
    private Map<String, String> uuidMap = new LinkedHashMap<>();

    /**
     * Creates (or looks up) the user for {@code email}, generates a one-time
     * verification link and, when {@code piana.email.send} is true, mails it.
     * Emits an error with code "email-already-existed-and-verified" when the
     * user is already verified.
     */
    public Completable sendVerificationCode(String email) {
        return userService.findOrCreate(email)
                .flatMap(userEntity ->
                        (SingleSource<UserEntity>) sin -> {
                            if (userEntity.getVerified() > 0) {
                                sin.onError(new IllegalArgumentException("email-already-existed-and-verified"));
                                // BUG FIX: previously fell through and also called
                                // onSuccess, violating the Single observer contract.
                                return;
                            }
                            sin.onSuccess(userEntity);
                        }
                ).flatMapCompletable(userEntity -> (CompletableSource) next -> {
                    String link = linkPrefix + "user/credential/sign-up/verify";
                    if (uuidMap.containsKey(email)) {
                        // remove-then-put moves the entry to the end of the
                        // LinkedHashMap iteration order (a plain put would keep
                        // the original insertion position).
                        uuidMap.remove(email);
                    }
                    UUID uuid = UUID.randomUUID();
                    // Link payload is base64("<uuid>:<email>").
                    String linkVar = Base64.getEncoder().encodeToString(
                            uuid.toString().concat(":").concat(email).getBytes());
                    link = link.concat("?link=" + linkVar);
                    uuidMap.put(email, uuid.toString());
                    if (sendMail) {
                        emailOperation.sendEmail(email, "ارسال مجدد فعالسازی", link);
                        logger.info("email send : " + link);
                    } else
                        logger.info("email not send : " + link);
                    next.onComplete();
                });
    }

    /**
     * Verifies a link produced by {@link #sendVerificationCode}: decodes the
     * base64 "<uuid>:<email>" payload and completes when the UUID matches the
     * one stored for that e-mail; otherwise errors with "invalid-link".
     */
    @Override
    public Completable verifyEmailByLink(String link) {
        return Completable.create(emitter -> {
            String decLink = new String(Base64.getDecoder().decode(link.getBytes()));
            String uuid = decLink.split(":")[0];
            String mail = decLink.split(":")[1];
            if (uuidMap.containsKey(mail) && uuid.equalsIgnoreCase(uuidMap.get(mail))) {
                emitter.onComplete();
                // BUG FIX: return so we do not also signal onError after
                // onComplete (terminal events are mutually exclusive).
                return;
            }
            emitter.onError(new IllegalArgumentException("invalid-link"));
        });
    }
}
<file_sep>package ir.piana.dev.core.api.func;
/**
* @author <NAME> (<EMAIL>)
* Date: 7/11/2019 3:44 PM
**/
@FunctionalInterface
public interface Consumer<T, E extends Throwable> {
    /** Performs this operation on {@code t}; may throw a checked {@code E}. */
    void accept(T t) throws E;
}<file_sep>package ir.piana.dev.user.api.impl;
import io.reactivex.Single;
import io.reactivex.SingleSource;
import ir.piana.dev.user.business.operation.UserOperation;
import ir.piana.dev.user.server.api.dto.*;
import ir.piana.dev.user.server.api.service.UserApi;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
/**
* @author <NAME> (<EMAIL>)
* Date: 6/16/2019 6:19 PM
**/
@Component
public class UserApiImpl implements UserApi {
@Autowired
private UserOperation userOperation;
@Override
public Single<ResponseDto> signup(SignUpDto argument) {
return userOperation.sendVerificationCode(argument.getEmail())
.toSingle(() -> {
ResponseDto responseDto = new ResponseDto();
responseDto.setCode(0);
responseDto.setCargo("verification link send to email");
return responseDto;
});
}
@Override
public Single<ResponseDto> signupVerify(String link) {
return userOperation.verifyEmailByLink(link)
.andThen((SingleSource<ResponseDto>) singleObserver -> {
ResponseDto responseDto = new ResponseDto();
responseDto.setCode(0);
responseDto.setCargo("verification successfully");
singleObserver.onSuccess(responseDto);
});
}
@Override
public Single<ResponseDto> resetPassword(SignupPasswordDto argument, Long xUserId) {
return Single.just(new ResponseDto());
/*return userOperation.setPassword(argument.getPassword(), argument.getPassword())
.map(b -> {
ResponseDto responseDto = new ResponseDto();
if(b) {
responseDto.setCode(0);
responseDto.setCargo("password reset");
} else {
responseDto.setCode(1);
responseDto.setCargo("password not reset");
}
return responseDto;
});*/
}
@Override
public Single<ResponseDto> login(LoginDto argument) {
return null;
}
@Override
public Single<ResponseDto> retrievePersonInfo() {
return null;
}
@Override
public Single<ResponseDto> savePersonInfo(PersonInfoDto argument) {
return null;
}
@Override
public Single<SampleDto> sayHello() {
return Single.just(new SampleDto().builder().message("hello world!").build());
}
@Override
public Single<ResponseDto> setPersonPicture(PersonPictureDto argument) {
return null;
}
@Override
public Single<ResponseDto> unsetPersonPicture() {
return null;
}
@Override
public Single<ResponseDto> updatePersonInfo(PersonInfoDto argument) {
return null;
}
}
<file_sep>package ir.piana.dev.core.api.exception;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
/**
* @author <NAME> (<EMAIL>)
* Date: 7/10/2019 1:46 PM
**/
public class BaseException extends Exception implements PianaException {
    /** Machine-readable code identifying the error message. */
    private final String messageCode;
    /** Additional context values attached to the error. */
    private final Map<String, Object> params;

    /** Creates an exception carrying only a message code. */
    public BaseException(String messageCode) {
        this(messageCode, Collections.emptyMap());
    }

    /** Creates an exception wrapping an underlying cause. */
    public BaseException(Throwable cause, String messageCode) {
        this(cause, messageCode, Collections.emptyMap());
    }

    /** Creates an exception with a message code and context parameters. */
    public BaseException(String messageCode, Map<String, Object> params) {
        super(messageCode);
        this.messageCode = messageCode;
        this.params = new HashMap<>(params);
    }

    /** Creates an exception wrapping a cause, with context parameters. */
    public BaseException(Throwable cause, String messageCode, Map<String, Object> params) {
        super(messageCode, cause);
        this.messageCode = messageCode;
        this.params = new HashMap<>(params);
    }

    public String getMessageCode() {
        return messageCode;
    }

    public Map<String, Object> getParams() {
        return params;
    }
}
<file_sep>jar {
baseName = 'digi-football-user'
archivesBaseName = baseName
// Fixed typo in the manifest title: "Footall" -> "Football".
manifest.attributes title: 'Digi Football User'
}
//buildClients(['project-name'])<file_sep>package ir.piana.dev.core.api.func;
import io.reactivex.functions.BiFunction;
import ir.piana.dev.core.api.helper.TransactionHelper;
/**
* @author <NAME> (<EMAIL>)
* Date: 7/11/2019 3:44 PM
**/
@FunctionalInterface
public interface ReadOnlyFunc2<T1, T2, R, E extends Throwable> extends BiFunction<T1, T2, R> {
    /** Business logic to run inside a read-only transaction; may throw {@code E}. */
    R executeReadOnly(T1 t1, T2 t2) throws E;

    @Override
    default R apply(T1 t1, T2 t2) {
        // Wraps the call in TransactionHelper's read-only transaction so this
        // type can be passed wherever an RxJava BiFunction is expected.
        return TransactionHelper.getBean().executeReadOnly(() -> executeReadOnly(t1, t2));
    }
}
<file_sep>package ir.piana.dev.core.api.func;
import io.reactivex.exceptions.Exceptions;
import java.util.function.Supplier;
/**
* @author <NAME> (<EMAIL>)
* Date: 7/11/2019 3:44 PM
**/
@FunctionalInterface
public interface ThrowingSupplier<R, E extends Throwable> extends Supplier<R> {
    /** Produces a value, possibly throwing a checked exception. */
    R getWithException() throws E;

    @Override
    default R get() {
        try {
            return getWithException();
        } catch (Throwable ex) {
            // Re-throws checked exceptions unchecked via RxJava's helper so this
            // type satisfies java.util.function.Supplier's unchecked contract.
            throw Exceptions.propagate(ex);
        }
    }

    /** Adapts this supplier to a ThrowingRunnable that discards the value. */
    default ThrowingRunnable<E> toThrowingRunnable() {
        return this::getWithException;
    }
}
<file_sep>jar {
baseName = 'digi-football-test'
archivesBaseName = baseName
// Fixed typo in the manifest title: "Footall" -> "Football".
manifest.attributes title: 'Digi Football Test'
}
buildClients(['user'])<file_sep>package ir.piana.dev.core.api.func;
/**
* @author <NAME> (<EMAIL>)
* Date: 7/11/2019 3:44 PM
**/
@FunctionalInterface
public interface Supplier<R, E extends Throwable> {
    /** Produces a value, possibly throwing a checked exception of type {@code E}. */
    R get() throws E;

    /** Adapts this supplier to a Runnable that discards the produced value. */
    default Runnable<E> toRunnable() {
        return this::get;
    }
}
| 5bfc561487182fd0820e7ca1faaef48c6ee41feb | [
"Java",
"SQL",
"Gradle"
] | 23 | Java | rahmatii1366/gradle-digi-football | 6ed24d236539f68b5ac115e2f82fcdc870500c79 | 0d6c0048977aa4f1aafd4347903ea47671db1b7f |
refs/heads/master | <file_sep>package com.example.android.quizzapp;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.SparseBooleanArray;
import android.view.View;
import android.widget.CheckBox;
import android.widget.EditText;
import android.widget.RadioButton;
import android.widget.Toast;
import static android.R.id.message;
public class MainActivity extends AppCompatActivity {
/**
* Id's of the RadioButton views that should be checked.
*/
private int[] radioButtonSolutions = {R.id.jaws,
R.id.star_trek,
R.id.batman,
R.id.life_bryan,
R.id.harry_met_sally,
R.id.westworld,
R.id.game_thrones,
R.id.luke_cage,
R.id.wargames};
/**
* Id's of the CheckBox views that should be checked.
*/
private SparseBooleanArray checkBoxSolutions;
private String textBoxSolution = "yoda";
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
initializeCheckBoxSolutions();
}
private void initializeCheckBoxSolutions() {
checkBoxSolutions = new SparseBooleanArray();
checkBoxSolutions.append(R.id.jeor_mormont, true);
checkBoxSolutions.append(R.id.robb_stark, true);
checkBoxSolutions.append(R.id.ros, true);
checkBoxSolutions.append(R.id.talysa_maegyr, true);
checkBoxSolutions.append(R.id.kahl_drogo, true);
checkBoxSolutions.append(R.id.grey_wind, true);
checkBoxSolutions.append(R.id.ygritte, true);
checkBoxSolutions.append(R.id.shae, true);
checkBoxSolutions.append(R.id.jon_snow, false);
checkBoxSolutions.append(R.id.the_hound, false);
checkBoxSolutions.append(R.id.little_finger, false);
checkBoxSolutions.append(R.id.bran_stark, false);
checkBoxSolutions.append(R.id.jorah_mormont, false);
}
/**
* Check solutions and show results in toast
* @param view
*/
public void checkSolution(View view) {
int errorCount= 0;
errorCount += checkTextBoxes();
errorCount += checkRadioButtons();
errorCount += checkCheckBoxes();
showToast(errorCount);
}
/**
* Check CheckBox views.
* @return how many errors have been found.
*/
private int checkCheckBoxes() {
int length = checkBoxSolutions.size();
int errorCount = 0;
for (int i=0; i<length; i++) {
int key = checkBoxSolutions.keyAt(i);
CheckBox checkBox = (CheckBox) findViewById(key);
if (checkBox.isChecked() != checkBoxSolutions.get(key))
errorCount++;
}
return errorCount;
}
/**
* Check RadioButton views.
* @return how many errors have been found.
*/
private int checkRadioButtons() {
int length = radioButtonSolutions.length;
int errorCount = 0;
for (int i=0; i<length; i++) {
RadioButton radioButton = (RadioButton) findViewById(radioButtonSolutions[i]);
if (!radioButton.isChecked())
errorCount++;
}
return errorCount;
}
/**
* Check the single EditText view.
* @return if an error has been found.
*/
private int checkTextBoxes() {
EditText solution = (EditText)findViewById(R.id.do_or_do_not_answer);
String solutionString = solution.getText().toString().trim();
if(solutionString.equalsIgnoreCase(textBoxSolution))
return 0;
return 1;
}
/**
* Show a toast with the result message.
* @param errorCount how many errors have been found.
*/
private void showToast(int errorCount) {
String message;
if(errorCount > 0) {
message = getString(R.string.toast_error_message, errorCount);
}else{
message = getString(R.string.toast_success_message);
}
Toast toast = Toast.makeText(this, message, Toast.LENGTH_SHORT);
toast.show();
}
}
| 232e49847673cb5b46aeb75e110f2775186f6e55 | [
"Java"
] | 1 | Java | JuanMenendezBuitrago/QuizApp | 2274ce519bc17edad131afd93ed0727370ec0566 | 82e11f31d12f0856df987b5a4b542bd12cfa0c3d |
refs/heads/master | <file_sep>class Station
attr_reader :name,
:street,
:city,
:distance,
:access_times,
:fuel_type,
:city,
:zip,
:state
def initialize(info)
@name = info["station_name"]
@street = info["street_address"]
@state = info["state"]
@zip = info["zip"]
@city = info["city"]
@distance = info["distance"]
@access_times = info["access_days_time"]
@fuel_type = info["fuel_type_code"]
end
def self.create_stations(stations)
stations.map do |station|
new(station)
end
end
end
<file_sep>class NService
def initialize(params)
@params = params
end
def nearest_stations
get_url("/api/alt-fuel-stations/v1/nearest?limit=10&fuel_type=ELEC,LPG&location=#{@params}")['response']['fuel_stations']
end
private
def get_url(url)
response = conn.get(url)
Hash.from_xml(response.body)
end
def headers
{
'X-Api-Key' => ENV["API_KEY"]
}
end
def conn
Faraday.new(url: 'https://developer.nrel.gov', headers: headers)
end
end
<file_sep>class NPresenter
def initialize(location)
nrel = NService.new(location)
@station_data = nrel.nearest_stations
@stations = Station.create_stations(@station_data)
end
def sorted_stations
@stations.sort_by(&:distance)
end
end
<file_sep>require 'rails_helper'
describe "as a user" do
context "from the root path" do
describe "when I fill in search form and click locate" do
it "shows list of the 10 closest stations within 6 miles sorted by distance" do
VCR.use_cassette("features/search", :record => :new_episodes) do
visit root_path
fill_in "q", with: 80203
click_on "Locate"
expect(current_path).to eq("/search")
expect(page).to have_css(".station", count: 10)
expect(page).to have_css(".station-name", count: 10)
expect(page).to have_css(".station-address", count: 10)
expect(page).to have_css(".station-fuel-type", count: 10)
expect(page).to have_css(".station-distance", count: 10)
expect(page).to have_css(".station-access-times", count: 10)
expect(page).to_not have_content("E85")
end
end
end
end
end
<file_sep>require 'rails_helper'
describe NPresenter do
describe "instance methods" do
subject { NPresenter.new(80231) }
describe "#sorted_stations" do
it "should return stations sorted by distance" do
stations = subject.sorted_stations
expect(station.first).to be()
end
end
end
end
| ea8670f0f2c6bc3150cf349c0f3ed2894336e9b6 | [
"Ruby"
] | 5 | Ruby | AtmaVichara/alt-fuel-finder | 3d2cefff7c5104b779334457926551c545eace8d | 285bced089c88c62a433b67574f12aabeb3905ad |
refs/heads/master | <file_sep>## Function “makeCacheMatrix” creates a special “matrix” object that can cache its inverse.
## get is a function that returns the vector x stored in the main function.
## set is a function that changes the vector stored in the main function.
## setinverse and getinverse are functions related to set and get by storing the value of the input
## in a variable m into the main function makeCacheMatrix (setinverse) and return it (getinverse).
makeCacheMatrix <- function(x = matrix()) {
m <- NULL
set <- function(y) {
x <<- y
m <<- NULL
}
get <- function() x
setinverse <- function(solve) m <<- solve
getinverse <- function() m
list(set = set, get = get,
setinverse = setinverse,
getinverse = getinverse)
}
## Function “cacheSolve” computes the inverse of the special “matrix” (which is the input of cachemean) returned by makeCacheMatrix above.
## If the inverse was calculated or is unchanged, then it will retrieve the inverse from the cache.
## If not, it gets the matrix stored with makeCacheMatrix, the inverse, and stores it.
cacheSolve <- function(x, ...) {
## Return a matrix that is the inverse of 'x'
m <- x$getinverse()
if(!is.null(m)) {
message("getting cached data")
return(m)
}
data <- x$get()
m <- solve(data, ...)
x$setinverse(m)
m
}
| 56c66a655210839c4c5c3678cb8a803d60e49d3b | [
"R"
] | 1 | R | theodoross/ProgrammingAssignment2 | 0be90be5ca60452553a9ab0603b1175878d02464 | 489a4b7c73bfa06234fdfc68295b0e7310daf21f |
refs/heads/master | <file_sep>// api.openweathermap.org/data/2.5/weather?q={Pune}&appid={1d3ad68d46c821b2382d6ce05aa9b9b8}
const curDate = document.getElementById("date");
const weatherCon = document.getElementById("weathercon");
const tempStatus = "{{tempstatus}}";
if(tempStatus == "Sunny"){
weatherCon.innerHTML = "<i class='fas fa-sun' style='color:#eccc68'></i>";
}
else if(tempStatus == "Clouds"){
weatherCon.innerHTML = "<i class='fas fa-cloud' style='color:#f1f2f5'></i>"
}
else if(tempStatus == "Rainy"){
weatherCon.innerHTML = "<i class='fas fa-cloud-rain' style='color:#a4b0be'></i>"
}
else{
weatherCon.innerHTML = "<i class='fas fa-cloud' style='color:#44c3de'></i>"
}
const getCurrentDay = () => {
let currentTime = new Date();
var weekday = ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"];
let day= weekday[currentTime.getDay()];
return day;
};
const getCurrentTime = () => {
var months = new Array();
months[0] = "Jan";
months[1] = "Feb";
months[2] = "Mar";
months[3] = "Apr";
months[4] = "May";
months[5] = "Jun";
months[6] = "Jul";
months[7] = "Aug";
months[8] = "Sep";
months[9] = "Oct";
months[10] = "Nov";
months[11] = "Dec";
let now = new Date();
var date = now.getDate();
var month = months[now.getMonth()];
let hours = now.getHours();
let mins = now.getMinutes();
let period = "AM";
if (hours > 11) {
period = "PM";
if (hours > 12) {
hours = hours - 12;
}
}
if (mins < 10) {
mins = "0" + mins;
}
return `${month} ${date} | ${hours}:${mins}${period}`;
// console.log(date + " | " + month + " | " + hours + ":" + mins + period);
}
curDate.innerHTML = getCurrentDay() + " | " + getCurrentTime();
// getCurrentDay();
// getCurrentTime();<file_sep>const express = require("express");
const app = express();
const path = require("path");
const hbs = require("hbs");
const port = process.env.PORT || 3000;
hbs.registerPartials(path.join(__dirname, "../partials"));
// console.log(path.join(__dirname, "../partials"));
app.set("view engine", "hbs");
// console.log(app.get("view engine"));
app.use(express.static(path.join(__dirname, "../public")));
app.get("/", (req, res) => {
res.render("index", {
title: "Prince HomePage"
});
})
app.get("/about", (req, res) => {
res.render("aboutpg", {
prince: "Prince Title"
});
})
app.get("/weather", (req, res) => {
res.render("weather");
})
app.get("*", (req, res) => {
res.render("404", {
errormssg: "Oops!! Page not Found"
});
})
app.listen(port, () => {
console.log(`listening to ${port}`);
})<file_sep>const city_name = document.getElementById("city_name");
const cityName = document.getElementById("cityname");
const submitBtn = document.getElementById("submitBtn");
const temp_status = document.getElementById("temp_status");
const temp = document.getElementById("temp");
const dataHide = document.querySelector('.middle_layer');
const day = document.getElementById("day");
today_date= document.getElementById('today_date');
dataHide.classList.add('data_hide');
//function defination
const getCurrentDay = () => {
let currentTime = new Date();
var weekday = ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"];
let day= weekday[currentTime.getDay()];
return day;
};
const getCurrentDate = ()=>{
let current = new Date();
let date = current.getDate();
return date;
}
const getCurMonth = ()=>{
var months = new Array();
months[0] = "Jan";
months[1] = "Feb";
months[2] = "Mar";
months[3] = "Apr";
months[4] = "May";
months[5] = "Jun";
months[6] = "Jul";
months[7] = "Aug";
months[8] = "Sep";
months[9] = "Oct";
months[10] = "Nov";
months[11] = "Dec";
let cur = new Date();
let month = months[cur.getMonth()];
return month;
}
const getInfo = async (event) => {
event.preventDefault();
let cityVal = cityName.value;
if (cityVal === "") {
city_name.innerText = "Please type a city name !!";
} else {
try {
let url = `https://api.openweathermap.org/data/2.5/weather?q=${cityVal}&units=metric&appid=1d3ad68d46c821b2382d6ce05aa9b9b8`;
const response = await fetch(url);
const data = await response.json();
const arrData = [data];
city_name.innerText= `${arrData[0].name}, ${arrData[0].sys.country}`;
temp.innerText = arrData[0].main.temp;
// temp_status.innerText = arrData[0].weather[0].main;
//condition to check favicon
const tempMood = arrData[0].weather[0].main;
if(tempMood == "Clear"){
temp_status.innerHTML = "<i class='fa fa-sun' style='color:#eccc68;'></i>";
}
else if(tempMood=="Clouds"){
temp_status.innerHTML = "<i class='fa fa-cloud' style='color:#1f2f6;'></i>"
}
else if(tempMood=="Rain"){
temp_status.innerHTML = "<i class='fa fa-rain' style='color:#a4b0be;'></i>"
}
else{
temp_status.innerHTML = "<i class='fa fa-cloud' style='color:#f1f2f6;'></i>"
}
dataHide.classList.remove('data_hide');
}
catch{
city_name.innerText = "Please type city name correctly !!";
dataHide.classList.add('data_hide');
}
}
};
submitBtn.addEventListener("click", getInfo);
day.innerText = getCurrentDay();
today_date.innerText = `${getCurrentDate()}|${getCurMonth()}` ; | b8dc5eff5f958bb4569e8a28a98dcbac3ada0ad9 | [
"JavaScript"
] | 3 | JavaScript | PRINCE-RANA123/Express-Web | 18b970a8b79c203f060cc4141692385dee8a6c1b | 69a8808b13657bc5099861d5f1c4dad8de888ca5 |
refs/heads/master | <file_sep>module.exports = {
database: {
host : 'database-1.ccfivm33z2y3.us-east-2.rds.amazonaws.com',
user : 'admin',
password : '<PASSWORD>',
database : 'futbolce_zon58'
}
};<file_sep># gemelas
lg
<file_sep>var express = require('express');
var app = express();
const path = require('path');
var fb = require('./routers/index');
const morgan = require('morgan');
const puerto = 4000;
const pool = require('./database');
app.use(morgan('tiny'));
app.use(fb);
app.use(express.static(path.join(__dirname,'../../public')));
app.set('views' , path.join(__dirname, 'views'))
app.set('view engine', 'ejs');
app.use(express.urlencoded({extended: false}))
// Correr el servidor con el puerto 8989.
app.listen(puerto, function () {
console.log(`corriendo puerto ${puerto}`);
});<file_sep>const pool = require('../database');
const categorias = {};
categorias.inicio = (req, res) => { res.render('index');}
categorias.mixto = async (req, res) => {
const vistas = await pool.query("SELECT * FROM `G_general_mixto_a20`");
const categoria = 'Gemelas Mixto C20'
const goleo = {}
const jornadas = await pool.query("SELECT * FROM `g_jor_mix_c20` ORDER BY `Jornada` DESC");
const Equipos = await pool.query("SELECT * FROM `g_jor_mix_c20` ORDER BY `Equipolc` DESC , `Jornada` DESC ");
res.render('tablas/principal',{vistas,categoria,goleo,jornadas,Equipos});}
categorias.femenil = async (req, res) => {
const vistas = await pool.query("SELECT * FROM `G_Generalfeme_a20`");
const categoria = 'Gemelas Femenil'
const goleo = {}
const jornadas = await pool.query("SELECT * FROM `G_jor_feme_a20` ORDER BY `Jornada` DESC");
const Equipos = await pool.query("SELECT * FROM `G_jor_feme_a20` ORDER BY `Equipolc` DESC , `Jornada` DESC ");
res.render('tablas/principal',{vistas,categoria,goleo,jornadas,Equipos});}
// Sub-19 league: same layout as the mixed/women's pages.
categorias.sub19 = async (req, res) => {
const vistas = await pool.query("SELECT * FROM `G_general_sub19_a20`");
const categoria = 'Gemelas Sub-19'
// Empty placeholder — presumably the shared template expects a `goleo` key; verify.
const goleo = {}
const jornadas = await pool.query("SELECT * FROM `G_jor_sub19_A20` ORDER BY `Jornada` DESC");
const Equipos = await pool.query("SELECT * FROM `G_jor_sub19_A20` ORDER BY `Equipolc` DESC , `Jornada` DESC ");
res.render('tablas/principal',{vistas,categoria,jornadas,Equipos,goleo});}
// Historical table: only the all-time standings are read.
categorias.Historica = async (req, res) => {
const vistas = await pool.query("SELECT * FROM `g_historica_c20`");
const categoria = 'Historica Gemelas'
// Empty objects fill the template sections this page does not use.
const goleo = {}
const jornadas = {}
const Equipos = {}
res.render('tablas/principal',{vistas,categoria,goleo,jornadas,Equipos});
}
// Player search form: the shared "forbuscar" view, parameterised by categoria/buscar.
categorias.Jugadores = (req, res) => {
    res.render('id/forbuscar', { categoria: 'id', buscar: 'buscarid' });
};
// Team search form: same view, different parameters.
categorias.Equipos = (req, res) => {
    res.render('id/forbuscar', { categoria: 'equipo', buscar: 'buscarequipo' });
};
// Placeholder page.
categorias.actual = (req, res) => { res.send('Actual '); };
/// Player lookup (buscar jugador)
// Fetches a player's registration row by exact `ID_FB` (?id=...).
// The value is bound as a parameter, never interpolated into the SQL.
categorias.id = async (req, res) => {
    var id = req.query.id;
    const registro = await pool.query("Select * From `Registro Global Heroes` WHERE ID_FB = ?" , [id])
    // Debug console.log of the id and of the full result set removed:
    // it leaked row data into the server logs on every request.
    res.render('id/jugadores/id',{registro});
}
// Player search: substring match on `Nombres` via a parameterised LIKE.
// (Leftover debug console.log of the result set removed.)
categorias.buscarid = async (req, res) => {
    var id = req.query.id;
    // Wrap the term in % wildcards; the value is bound, not concatenated into SQL.
    var idv = "%"+ id + "%";
    const registro = await pool.query("SELECT * FROM `Registro Global Heroes` WHERE `Nombres` LIKE ?",[idv])
    res.render('id/jugadores/buscarid',{registro,id})}
/// Team search (buscar equipos)
// Team search: substring match on `Nombre_Equipo` via a parameterised LIKE.
// (Leftover debug console.log of the result set removed.)
categorias.buscarequipo = async (req, res) => {
    var id = req.query.id;
    var idv = "%"+ id + "%";
    const registro = await pool.query("SELECT * FROM `Registros Global Equipo Heroes` WHERE `Nombre_Equipo` LIKE ?" , [idv])
    res.render('id/equipos/buscarequipos',{registro,id})}
// Team detail: gathers the team's registration row plus its standing in every
// table (historic, women's, mixed, sub-19) for one view.
// (Two leftover debug console.log calls removed.)
categorias.idequipo = async (req, res) => {
    var id = req.query.id;
    const Globales = await pool.query("Select * From `g_historica_c20` WHERE ID = ?" , [id])
    const registro = await pool.query("Select * From `Registros Global Equipo Heroes` WHERE Id_plantel = ?" , [id])
    const Globalesfeme = await pool.query("Select * From `G_Generalfeme_a20` WHERE ID = ?" , [id])
    const Globalesmix = await pool.query("Select * From `G_general_mixto_a20` WHERE ID = ?" , [id])
    const Globales19 = await pool.query("Select * From `G_general_sub19_a20` WHERE ID = ?" , [id])
    res.render('id/equipos/equipo',{registro,id,Globales,Globalesmix,Globales19,Globalesfeme})}
module.exports = categorias; | 15c4e605d235850172f66ba4673d4c5956113461 | [
"JavaScript",
"Markdown"
] | 4 | JavaScript | ErikNavarrete58/gemelas | 4c381f58b1c7df191d5300342f90a83f6d95718f | 2c1a73b84bb365ec5bc3214e3e40c2feb8fe5898 |
refs/heads/master | <repo_name>kendo1994/GitTest<file_sep>/README.md
# GitTest
สอบอาจารย์ยอด
<file_sep>/Assets/Scripts/ShowItem.cs
using UnityEngine;
using System.Collections;
using Mono.Data.SqliteClient;
using System.Data;
using System;
using UnityEngine.UI;
public class ShowItem : MonoBehaviour {
public Text txtChange;
public void show(){
string connectionString = "URI=file:" + Application.dataPath + "/myDatabase.db"; //Path to database.
IDbConnection dbconn;
dbconn = (IDbConnection) new SqliteConnection(connectionString);
dbconn.Open(); //Open connection to the database.
IDbCommand dbcmd = dbconn.CreateCommand();
string sqlQuery = "Select * " + "From Account";
dbcmd.CommandText = sqlQuery;
IDataReader reader = dbcmd.ExecuteReader();
while(reader.Read()){
string username = reader.GetString(1);
string password = reader.GetString(2);
int age = reader.GetInt32(3) ;
txtChange.text += "\nUsername = " + username + "\nPassword = " + password + "\nAge = " + age;
}
reader.Close();reader = null;
dbcmd.Dispose();dbcmd = null;
dbconn.Close();dbconn = null;
}
}
| 73ead9b72d6f1841cc7fb19257220befe296704b | [
"Markdown",
"C#"
] | 2 | Markdown | kendo1994/GitTest | 5141bf7770a882baf5d62f19276fb90af59c9381 | 235b82586d32fb5bc1fa487384a10bfdd5ff92ad |
refs/heads/master | <file_sep> function controlInactividad(idleMinInput, warningMinInput, logoutUrl) {
var t;
var activeTime;
var warningCountdown;
var sessExpirDiv = document.getElementById('sessExpirDiv');
window.onload = resetTimer; /* Window is refreshed. */
// window.onmousemove = resetTimer; /* Mouse is moved. */
window.onkeypress = resetTimer; /* Key is pressed. */
// window.onmousedown = resetTimer; /* Touchscreen is pressed. */
window.onclick = resetTimer; /* Touchpad clicks. */
window.onscroll = resetTimer; /* Scrolling with arrow keys. */
function warning(idleSeconds, warningSeconds) {
warningStart = setTimeout(function() {
sessExpirDiv.style.opacity = '1';
sessExpirDiv.style.zIndex = '999999';
}, 1000); /* Wtihout this, warning div would appear before the text. */
remaining = idleSeconds - warningSeconds;
warningCountdown = setInterval(function() { /* Update every 1 second. */
if (remaining <= 0) {
/* Now we check that no other tab has been active after us. */
var browserActive = localStorage.getItem('activeTime');
if (activeTime != browserActive) { /* Then another tab has been active more recently than this tab. */
// alert("Not the same. User has been active in another tab. browserActive: " + browserActive + " and activeTime: " + activeTime);
/* We want to keep going, because user might close the other tab - and if this script is broken, the controlInactividad is broken. */
controlInactividad(idleMinInput, warningMinInput, logoutUrl);
} else {
// alert("The same. User has not been active in another tab. browserActive: " + browserActive + " and activeTime: " + activeTime);
logout();
}
} else {
remaining -= 1;
document.getElementById('sessExpirDiv').innerHTML =
`<div class="row center">
<div class="col s12 center">
<div class="card red darken-4 center">
<div class="card-content white-text">
<span class="card-title">Alerta</span>
<p>Esto se va a cerrar. Use el teclado ó el mouse para mantener logueado
${remaining} segundos restantes </p>
</div>
<div class="card-action black">
<a href="./index.html">Terminar</a> <a href="#">Mantener</a>
</div>
</div>
</div>
</div>`;
}
}, 1000);
}
function recordTime() {
activeTime = Date.now(); /* Milliseconds since 1970/01/01. */
localStorage.setItem('activeTime', activeTime);
}
function clearEverything() {
clearTimeout(t);
clearInterval(warningCountdown);
clearWarning();
}
function clearWarning() {
sessExpirDiv.style.opacity = '0';
sessExpirDiv.innerHTML = ' ';
sessExpirDiv.style.zIndex = '-999999';
}
function logout() {
window.location.href = logoutUrl;
}
function resetTimer() {
console.log("leooooooddddddd");
clearEverything();
recordTime(); /* Records across all tabs in browser. */
var idleMinutes = idleMinInput; /* After how long idle time do we log out user? */
var warningMinutes = warningMinInput; /* After how long idle time do we start the warning countdown? */
var idleSeconds = parseInt(idleMinutes * 60);
var warningSeconds = parseInt(warningMinutes * 60);
var wMilliSeconds = warningSeconds * 1000;
/* When user has been idle warningSeconds number of seconds, we display warning and countdown. */
t = setTimeout(function() { warning(idleSeconds, warningSeconds); }, wMilliSeconds);
}
};
// export class ControlDeInactividad {
// constructor(propiedades = {
// titulo,
// mensaje,
// inactividad,
// conteoRegresivo,
// monitoreo: { intervalo, clase, accion, rspuesta },
// accion
// }) {
// propiedades.inactividad = 3;
// propiedades.idTimeout = null;
// propiedades.reiniciar = true;
// propiedades.materialDialog;
// this.monitorearSesion(propiedades);
// // this.idleTimeout;
// // // Init on page load
// // this.resetIdleTimeout(propiedades);
// // // Reset the idle timeout on any of the events listed below
// // ['click', 'touchstart', 'mousemove'].forEach(evt =>
// // document.addEventListener(evt, this.resetIdleTimeout, propiedades, false)
// // );
// };
// iniciarInactividad(propiedades) {
// if (propiedades.reiniciar) {
// if (propiedades.idTimeout) {
// propiedades.idTimeout;
// }
// this.idleTimeout = setTimeout(() => {
// // location.href = this.redirectUrl;
// x(propiedades)
// }, propiedades.inactividad * 1000);
// // Configurar el contador de tiempo de inactividad propiedades.idTimeout
// // Configurar un MaterialDialog con los botones 'Sí, terminar' y 'No, continuar',
// // El primero para llamar al callBack ‘Acción’ dado como argumento en la creación de la instancia de tipo ControlDeInactividad.
// // El segundo para que cuando se pulse clic sobre el, se detenga el conteo regresivo.
// let cuentaRegresiva = 0;
// propiedades.conteoRegresivo
// // En cada segundo incrementar el conteo y actualizar el mensaje de
// // conteo regresivo
// if se alcanza el límite de espera para que el usuario reaccione {
// // llamar al callBack ‘Acción’ dado como argumento en la creación de
// // la instancia
// }
// propiedades.reiniciar = true;
// }
// };
// monitorearSesion(propiedades) {
// ['click', 'touchstart', 'mousemove'].forEach(evt =>
// document.addEventListener(evt, iniciarInactividad(propiedades), propiedades.reiniciar = true, false)
// );
// // Para cada evento 'click', 'touchstart' o 'mousemove' establecer como acciones {
// // propiedades.reiniciar = true;
// // iniciarInactividad(propiedades);
// }
// CuadroDialogo(propiedades) {
// MaterialDialog.dialog(propiedades.mensaje, {
// title: propiedades.titulo,
// buttons: {
// close: {
// text: 'Terminar',
// callback: propiedades.accion
// },
// confirm: {
// text: 'Continuar',
// callback: () => propiedades.monitoreo.accion
// }
// }
// });
// }
// // Forzar a que ocurra uno de los eventos anteriores
// // Aquí es necesario utilizar setInterval con un callBack que se encargue de solicitar al back-end la verificación de la sesión, es decir, si el status de la misma es activo o no. Esto debe hacerse en el intervalo de segundos establecido para el monitoreo.
// // Si llegase a suceder que la sesión ya no esté activa, entonces se dispara la función asignada al callBack ‘accion’. Ver el ejemplo de creación del objeto.
// }
// CuadroDialogo(propiedades) {
// MaterialDialog.dialog(propiedades.mensaje, {
// title: propiedades.titulo,
// buttons: {
// close: {
// text: 'Terminar',
// callback: propiedades.accion
// },
// confirm: {
// text: 'Continuar',
// callback: () => propiedades.monitoreo.accion
// }
// }
// });
// }
// resetIdleTimeout(propiedades) {
// let x = this.CuadroDialogo;
// // Clears the existing timeout
// if (this.idleTimeout) {
// clearTimeout(this.idleTimeout);
// }
// // Set a new idle timeout to load the redirectUrl after idleDurationSecs
// this.idleTimeout = setTimeout(() => {
// // location.href = this.redirectUrl;
// x(propiedades)
// }, propiedades.inactividad * 1000);
// };
// function idleTimer() {
// var t;
// //window.onload = resetTimer;
// window.onmousemove = resetTimer; // catches mouse movements
// window.onmousedown = resetTimer; // catches mouse movements
// window.onclick = resetTimer; // catches mouse clicks
// window.onscroll = resetTimer; // catches scrolling
// window.onkeypress = resetTimer; //catches keyboard actions
// function logout() {
// window.location.href = '/action/logout'; //Adapt to actual logout script
// }
// function reload() {
// window.location = self.location.href; //Reloads the current page
// }
// function resetTimer() {
// clearTimeout(t);
// t = setTimeout(logout, 1800000); // time is in milliseconds (1000 is 1 second)
// t = setTimeout(reload, 300000); // time is in milliseconds (1000 is 1 second)
// }
// }
// idleTimer(); | 0dfd424cd7214185f7771f684a300354c9edf4a1 | [
"JavaScript"
] | 1 | JavaScript | leocardc/proyecto-prog3 | 19e61b1ca8cb33e308a4af6ae8d9e1bf0eb0246e | bfecbeaa145d078e5d0d64ee102f9eb9a4c2b884 |
refs/heads/master | <file_sep>function getFirstSelector(selector){
var selected = document.querySelector(selector)
return selected
}
function nestedTarget(){
var target = document.querySelector("#nested .target")
return target
}
function increaseRankBy(n){
var target = document.querySelectorAll(".ranked-list")
var int = ""
for (var i = 0; i < target.length; i++) {
int = parseInt(target[i].innerHTML)
target[i].innerHTML = (int + n).toString()
}
return target
}
function deepestChild(){
var selector = document.querySelector("div#grand-node")
var next = []
for (var i = 0; i < selector.length; i++) {
next = document.querySelector(selector[i])
}
return selector
}
| e364e6877347376ea4da059b45432a32c19632e0 | [
"JavaScript"
] | 1 | JavaScript | rj-ortega/javascript-hide-and-seek-bootcamp-prep-000 | 491c5a6f3ef309758c6b5009a3ab7b5005bdc83d | 8857f8e8e8581abcace4d00ce63ee2e7b38df449 |
refs/heads/master | <repo_name>huunhancit/springsecurity<file_sep>/springsecurity-customlogin/settings.gradle
rootProject.name = 'springsecurity-customlogin'
<file_sep>/spring-sercurity-annotation/settings.gradle
rootProject.name = 'spring-sercurity-annotation'
<file_sep>/springmvc-annotation/settings.gradle
rootProject.name = 'springmvc-annotation'
<file_sep>/springsecurity-hibernate/settings.gradle
rootProject.name = 'springsecurity-hibernate'
<file_sep>/springsecurity-limit/settings.gradle
rootProject.name = 'springsecurity-limit'
<file_sep>/springmvc-annotation/src/main/java/com/tma/service/HelloService.java
package com.tma.service;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import org.springframework.util.StringUtils;
/**
* Created by dhnhan on 7/31/15.
*/
@Service
public class HelloService {
    private static final Logger log = LoggerFactory.getLogger(HelloService.class);

    /**
     * Returns the fixed hello-world message used by the demo controller.
     */
    public String getMessage() {
        log.debug("getMessage() is executed !");
        return "Spring MVC + Gradle Hello world";
    }

    /**
     * Builds a greeting for {@code name}, falling back to "Hello world"
     * when the name is null or empty.
     */
    public String getTitle(String name) {
        log.debug("getTitle() is executed : {}", name);
        return StringUtils.isEmpty(name) ? "Hello world" : "Hello " + name;
    }
}
<file_sep>/springsecurity-hibernate/src/main/java/com/tma/config/core/SpringSecurityInitializer.java
package com.tma.config.core;
/**
* Created by dhnhan on 8/4/15.
*/
public class SpringSecurityInitializer {
    // Intentionally empty — presumably the security bootstrap is configured
    // elsewhere in this module (e.g. XML/web.xml); verify before removing.
}
<file_sep>/springsecurity-hibernate/src/main/java/com/tma/users/dao/UserDao.java
package com.tma.users.dao;
import com.tma.users.model.User;
/**
* Created by dhnhan on 8/4/15.
*/
public interface UserDao {
    /** Looks up a {@link User} by username; behaviour for a missing user is implementation-defined. */
    User findByUserName(String username);
}
<file_sep>/springsecurity-hibernate/build.gradle
// Build script for the springsecurity-hibernate demo web app (WAR packaging).
group 'com.tma'
version '1.0-SNAPSHOT'
apply plugin: 'java'
apply plugin: 'war'
// Compile for Java 8.
sourceCompatibility = 1.8
repositories {
mavenCentral()
mavenLocal()
}
dependencies {
testCompile group: 'junit', name: 'junit', version: '4.11'
// Connection pooling + ORM.
compile 'commons-dbcp:commons-dbcp:1.4'
compile 'org.hibernate:hibernate-core:4.3.10.Final'
// Spring core / MVC / ORM integration.
compile 'org.springframework:spring-core:4.1.6.RELEASE'
compile 'org.springframework:spring-webmvc:4.1.6.RELEASE'
compile 'org.springframework:spring-orm:4.1.6.RELEASE'
// Spring Security web stack plus JSP taglib.
compile 'org.springframework.security:spring-security-web:4.0.1.RELEASE'
compile 'org.springframework.security:spring-security-config:4.0.1.RELEASE'
compile 'org.springframework.security:spring-security-taglibs:4.0.1.RELEASE'
compile 'jstl:jstl:1.2'
compile 'mysql:mysql-connector-java:5.1.31'
// Logging: SLF4J API with Logback backend; commons-logging is bridged.
compile 'org.slf4j:slf4j-api:1.7.5'
compile 'org.slf4j:jcl-over-slf4j:1.7.5'
compile 'ch.qos.logback:logback-classic:1.0.13'
// Servlet API is provided by the container at runtime.
providedCompile 'javax.servlet:servlet-api:2.5'
}
<file_sep>/springsecurity-database/settings.gradle
rootProject.name = 'springsecurity-database'
| 2c289102cb35a2abc855dd5729ed62c7bd160757 | [
"Java",
"Gradle"
] | 10 | Gradle | huunhancit/springsecurity | 6320e1386a79400545f7c2b7a2b9bdb27a7471d8 | aceb8f0dc74be651afed3288bac7d6af8968798c |
refs/heads/master | <file_sep># FoodPin
FoodPin
This is new swift project. About mark some good restruant you liked.
<file_sep>//
// ReviewViewController.swift
// FoodPin
//
// Created by KlayThompson on 2017/3/14.
// Copyright © 2017年 AppCoda. All rights reserved.
//
import UIKit
class ReviewViewController: UIViewController {
@IBOutlet weak var topImageView: UIImageView!
@IBOutlet weak var containerView: UIView!
@IBOutlet weak var backgroundImageView: UIImageView!
var restaurant:RestaurantMO!
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
backgroundImageView.image = UIImage (data: restaurant.image! as Data)
topImageView.image = UIImage (data: restaurant.image! as Data)
let blurEffect = UIBlurEffect (style: .dark)
let blurEffectView = UIVisualEffectView (effect: blurEffect)
blurEffectView.frame = view.bounds
backgroundImageView.addSubview(blurEffectView)
// containerView.transform = CGAffineTransform.init(scaleX: 0, y: 0)
// containerView.transform = CGAffineTransform.init(translationX: 0, y: -1000)
let scaleTransform = CGAffineTransform.init(scaleX: 0, y: 0)
let translateTransform = CGAffineTransform.init(translationX: 0, y: -1000)
let combineTransform = translateTransform.concatenating(scaleTransform)
containerView.transform = combineTransform
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
// UIView.animate(withDuration: 0.3) {
// self.containerView.transform = CGAffineTransform.identity
// }
UIView.animate(withDuration: 0.5, delay: 0.0, usingSpringWithDamping: 0.3, initialSpringVelocity: 0.7, options: .curveEaseInOut, animations: {
self.containerView.transform = CGAffineTransform.identity
}, completion: nil)
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
/*
// MARK: - Navigation
// In a storyboard-based application, you will often want to do a little preparation before navigation
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
// Get the new view controller using segue.destinationViewController.
// Pass the selected object to the new view controller.
}
*/
}
<file_sep>//
// DiscoveryTableViewController.swift
// FoodPin
//
// Created by KlayThompson on 2017/3/22.
// Copyright © 2017年 AppCoda. All rights reserved.
//
import UIKit
import CoreData
class DiscoveryTableViewController: UITableViewController {
var restaurants:[RestaurantMO] = []
var fetchResultController: NSFetchedResultsController<RestaurantMO>!
override func viewDidLoad() {
super.viewDidLoad()
// Uncomment the following line to preserve selection between presentations
// self.clearsSelectionOnViewWillAppear = false
// Uncomment the following line to display an Edit button in the navigation bar for this view controller.
// self.navigationItem.rightBarButtonItem = self.editButtonItem()
self.tableView.rowHeight = 250
navigationController?.hidesBarsOnSwipe = true
DispatchQueue.global().async {
self.loadData()
DispatchQueue.main.async {
self.tableView.reloadData()
}
}
}
func loadData() {
let fetchRequest: NSFetchRequest<RestaurantMO> = RestaurantMO.fetchRequest()
let sortDescriptor = NSSortDescriptor (key: "name", ascending: true)
fetchRequest.sortDescriptors = [sortDescriptor]
if let app = (UIApplication.shared.delegate as? AppDelegate) {
let context = app.persistentContainer.viewContext
fetchResultController = NSFetchedResultsController (fetchRequest: fetchRequest, managedObjectContext: context, sectionNameKeyPath: nil, cacheName: nil)
do {
try fetchResultController.performFetch()
if let fetchedObjects = fetchResultController.fetchedObjects {
restaurants = fetchedObjects
}
} catch {
print(error)
}
}
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
// MARK: - Table view data source
override func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
// #warning Incomplete implementation, return the number of rows
return restaurants.count
}
override func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
let cell = tableView.dequeueReusableCell(withIdentifier: "Cell", for: indexPath) as! DiscoveryTableViewCell
// Configure the cell...
let restaurantMO = restaurants[indexPath.row]
cell.configCellDataWithRestaurantMO(restaurantMO: restaurantMO)
return cell
}
override func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
tableView.deselectRow(at: indexPath, animated: true)
}
/*
// MARK: - Navigation
// In a storyboard-based application, you will often want to do a little preparation before navigation
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
// Get the new view controller using segue.destinationViewController.
// Pass the selected object to the new view controller.
}
*/
}
<file_sep>//
// RestaurantDetailViewController.swift
// FoodPin
//
// Created by <NAME> on 20/7/2016.
// Copyright © 2016 AppCoda. All rights reserved.
//
import UIKit
import MapKit
import ZYBannerView
class RestaurantDetailViewController: UIViewController, UITableViewDataSource, UITableViewDelegate {
@IBOutlet weak var mapView: MKMapView!
// @IBOutlet var restaurantImageView: UIImageView!
@IBOutlet var tableView:UITableView!
@IBOutlet weak var bannerView: ZYBannerView!
var restaurant:RestaurantMO!
var localImages: [UIImage]?
override func viewDidLoad() {
super.viewDidLoad()
bannerView.dataSource = self
localImages = restaurant.images as? [UIImage]
// restaurantImageView.image = UIImage(data: restaurant.image! as Data)
tableView.backgroundColor = UIColor(red: 240.0/255.0, green: 240.0/255.0, blue: 240.0/255.0, alpha: 0.2)
tableView.separatorColor = UIColor(red: 240.0/255.0, green: 240.0/255.0, blue: 240.0/255.0, alpha: 0.8)
title = restaurant.name
navigationController?.hidesBarsOnSwipe = false
tableView.estimatedRowHeight = 36.0
tableView.rowHeight = UITableViewAutomaticDimension
let tapGestureRecognizer = UITapGestureRecognizer (target: self, action: #selector(showMap))
mapView.addGestureRecognizer(tapGestureRecognizer)
let geoCoder = CLGeocoder()
geoCoder.geocodeAddressString(restaurant.location!, completionHandler: {
placemarks, error in
if error != nil {
print(error ?? "")
return
}
if placemarks != nil {
// Get the first placemark
let placemark = placemarks?[0]
// Add annotation
let annotation = MKPointAnnotation()
if let location = placemark?.location {
// Display the annotation
annotation.coordinate = location.coordinate
self.mapView.addAnnotation(annotation)
// Set the zoom level
let region = MKCoordinateRegionMakeWithDistance(annotation.coordinate, 250, 250)
self.mapView.setRegion(region, animated: true)
}
}
})
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
navigationController?.hidesBarsOnSwipe = false
navigationController?.setNavigationBarHidden(false, animated: true)
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
// MARK: - UITableView
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return 5
}
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
let cell = tableView.dequeueReusableCell(withIdentifier: "Cell", for: indexPath) as! RestaurantDetailTableViewCell
// Configure the cell...
switch indexPath.row {
case 0:
cell.fieldLabel.text = NSLocalizedString("Name", comment: "Name Field")
cell.valueLabel.text = restaurant.name
case 1:
cell.fieldLabel.text = "Type"
cell.valueLabel.text = restaurant.type
case 2:
cell.fieldLabel.text = "Location"
cell.valueLabel.text = restaurant.location
case 3:
cell.fieldLabel.text = "Phone"
cell.valueLabel.text = restaurant.phone
case 4:
cell.fieldLabel.text = "Been here"
cell.valueLabel.text = (restaurant.isVisited) ? "Yes, I've been here before. \(String(describing: restaurant.rating))" : "No"
default:
cell.fieldLabel.text = ""
cell.valueLabel.text = ""
}
cell.backgroundColor = UIColor.clear
return cell
}
@IBAction func close(segue: UIStoryboardSegue) {
}
@IBAction func ratingButtonTapped(segue:UIStoryboardSegue) {
if let rating = segue.identifier {
restaurant.isVisited = true
switch rating {
case "great": restaurant.rating = "Absolutely love it! Must try."
case "good": restaurant.rating = "Pretty good."
case "dislike": restaurant.rating = "I don't like it."
default: break
}
}
if let app = (UIApplication.shared.delegate as? AppDelegate) {
app.saveContext()
}
tableView.reloadData()
}
func showMap() {
performSegue(withIdentifier: "showMap", sender: self)
}
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
if segue.identifier == "showReview" {
let destinationController = segue.destination as! ReviewViewController
destinationController.restaurant = restaurant
} else if segue.identifier == "showMap" {
let destinationController = segue.destination as! MapViewController
destinationController.restaurant = restaurant
}
}
/*
// MARK: - Navigation
// In a storyboard-based application, you will often want to do a little preparation before navigation
override func prepare(for segue: UIStoryboardSegue, sender: AnyObject?) {
// Get the new view controller using segue.destinationViewController.
// Pass the selected object to the new view controller.
}
*/
}
extension RestaurantDetailViewController: ZYBannerViewDataSource {
func numberOfItems(inBanner banner: ZYBannerView!) -> Int {
return localImages?.count ?? 0
}
func banner(_ banner: ZYBannerView!, viewForItemAt index: Int) -> UIView! {
guard let image = localImages?[index] else {
return UIView()
}
let imageView = UIImageView(image: image)
imageView.contentMode = .scaleAspectFill
imageView.layer.masksToBounds = true
return imageView
}
func banner(_ banner: ZYBannerView!, titleForFooterWith footerState: ZYBannerFooterState) -> String! {
return "别扯了,扯坏了"
}
}
<file_sep>//
// DiscoveryTableViewCell.swift
// FoodPin
//
// Created by KlayThompson on 2017/3/23.
// Copyright © 2017年 AppCoda. All rights reserved.
//
import UIKit
class DiscoveryTableViewCell: UITableViewCell {
@IBOutlet weak var restaurantImageView: UIImageView!
@IBOutlet weak var nameLabel: UILabel!
@IBOutlet weak var typeLabel: UILabel!
@IBOutlet weak var adressLabel: UILabel!
override func awakeFromNib() {
super.awakeFromNib()
// Initialization code
}
func configCellDataWithRestaurantMO(restaurantMO: RestaurantMO) {
restaurantImageView.image = UIImage (data: restaurantMO.image! as Data)
nameLabel.text = restaurantMO.name
typeLabel.text = restaurantMO.type
adressLabel.text = restaurantMO.location
}
override func setSelected(_ selected: Bool, animated: Bool) {
super.setSelected(selected, animated: animated)
// Configure the view for the selected state
}
}
<file_sep>//
// AppDelegate.swift
// FoodPin
//
// Created by <NAME> on 7/7/2016.
// Copyright © 2016 AppCoda. All rights reserved.
//
import UIKit
import CoreData
import UserNotifications
@UIApplicationMain
class AppDelegate: UIResponder, UIApplicationDelegate {
var window: UIWindow?
enum QuickAction: String {
case OpenFavorites = "OpenFavorites"
case OpenDiscover = "OpenDiscover"
case NewRestaurant = "NewRestaurant"
init?(fullIdentifier: String) {
guard let shortcutIdentifier = fullIdentifier.components(separatedBy: ".").last else {
return nil
}
self.init(rawValue: shortcutIdentifier)
}
}
func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplicationLaunchOptionsKey : Any]? = nil) -> Bool {
// Override point for customization after application launch.
UINavigationBar.appearance().barTintColor = UIColor(red: 216.0/255.0, green: 74.0/255.0, blue: 32.0/255.0, alpha: 1.0)
UINavigationBar.appearance().tintColor = UIColor.white
if let barFont = UIFont(name: "AvenirNextCondensed-DemiBold", size: 24.0) {
UINavigationBar.appearance().titleTextAttributes = [NSForegroundColorAttributeName:UIColor.white, NSFontAttributeName:barFont]
}
UIApplication.shared.statusBarStyle = .lightContent
//Tabbar
UITabBar.appearance().tintColor = UIColor(red: 235.0/255.0, green: 75.0/255.0,
blue: 27.0/255.0, alpha: 1.0)
UITabBar.appearance().barTintColor = UIColor(red: 236.0/255.0, green: 240.0/255.0,
blue: 241.0/255.0, alpha: 1.0)
// UITabBar.appearance().selectionIndicatorImage = UIImage(named: "tabitem-selected")
//判断有没有打开通知
UNUserNotificationCenter.current().requestAuthorization(options: [.alert, .sound, .badge]) { (granted, error) in
if granted {
print("User notifications are allowed.")
} else {
print("User notifications are not allowed.")
}
}
return true
}
func applicationWillResignActive(_ application: UIApplication) {
// Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
// Use this method to pause ongoing tasks, disable timers, and throttle down OpenGL ES frame rates. Games should use this method to pause the game.
}
func applicationDidEnterBackground(_ application: UIApplication) {
// Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
// If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
}
func applicationWillEnterForeground(_ application: UIApplication) {
// Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background.
}
func applicationDidBecomeActive(_ application: UIApplication) {
// Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
}
func applicationWillTerminate(_ application: UIApplication) {
// Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
}
lazy var persistentContainer: NSPersistentContainer = {
let container = NSPersistentContainer (name: "FoodPin")
container.loadPersistentStores(completionHandler: { (storeDescription, error) in
if let error = error as NSError? {
fatalError("Unresolved error \(error), \(error.userInfo)")
}
})
return container
}()
func saveContext() {
let context = persistentContainer.viewContext
if context.hasChanges {
do {
try context.save()
} catch {
let nserror = error as NSError
fatalError("Unresolved error \(nserror), \(nserror.userInfo)")
}
}
}
// MARK: - 3D-Touch
func application(_ application: UIApplication, performActionFor shortcutItem: UIApplicationShortcutItem, completionHandler: @escaping (Bool) -> Void) {
print("performActionFor is called")
completionHandler(handleQuickAction(shortcutItem: shortcutItem))
}
private func handleQuickAction(shortcutItem: UIApplicationShortcutItem) -> Bool {
let shortcutType = shortcutItem.type
guard let shortcutIdentifier = QuickAction(fullIdentifier: shortcutType) else {
return false
}
guard let tabBarController = window?.rootViewController as? UITabBarController else {
return false
}
switch shortcutIdentifier {
case.OpenFavorites:
tabBarController.selectedIndex = 0
case.OpenDiscover:
tabBarController.selectedIndex = 1
case.NewRestaurant:
if let navController = tabBarController.viewControllers?[0] {
let restaurantTableViewController = navController.childViewControllers[0]
restaurantTableViewController.performSegue(withIdentifier: "addRestaurant", sender: restaurantTableViewController)
} else {
return false
}
}
return true
}
}
<file_sep>//
// AddRestaurantController.swift
// FoodPin
//
// Created by KlayThompson on 2017/3/15.
// Copyright © 2017年 AppCoda. All rights reserved.
//
import UIKit
import CoreData
import ZLPhotoBrowser
import ZYBannerView
/// Form controller for creating a new restaurant record.
///
/// Collects the restaurant's name, type, location, phone number and photos,
/// validates that all text fields are filled, and persists the result into
/// Core Data through the shared `AppDelegate` container before unwinding
/// back to the home screen.
class AddRestaurantController: UITableViewController, UIImagePickerControllerDelegate, UINavigationControllerDelegate {
    @IBOutlet weak var photoImageView: UIImageView!
    @IBOutlet weak var nameTextField: UITextField!
    @IBOutlet weak var typeTextField: UITextField!
    @IBOutlet weak var locationTextField: UITextField!
    @IBOutlet weak var yesButton: UIButton!
    @IBOutlet weak var noButton: UIButton!
    @IBOutlet weak var phoneTextField: UITextField!
    @IBOutlet weak var bannerView: ZYBannerView!

    // Images picked from the photo library; nil until the user selects some.
    var localImages: [UIImage]?
    // The managed object created when the form is saved.
    var restaurant: RestaurantMO!
    // Whether the user has marked the restaurant as visited (defaults to true).
    var isVisited = true

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }

    /// Validates the form and, when complete, saves the data and unwinds
    /// to the home screen. Shows an alert if any required field is empty.
    @IBAction func saveButtonClick(_ sender: UIBarButtonItem) {
        // All four text fields are required; one shared message covers them all.
        let requiredFields = [nameTextField, typeTextField, locationTextField, phoneTextField]
        if requiredFields.contains(where: { $0?.text == "" }) {
            let alertTip = UIAlertController(title: "Oops", message: "We can't proceed because one of the fields is blank. Please note that all fields are required.", preferredStyle: .alert)
            let cancelAction = UIAlertAction(title: "OK", style: .cancel, handler: nil)
            alertTip.addAction(cancelAction)
            self.present(alertTip, animated: true, completion: nil)
            return
        }
        self.saveRestaruantData()
        performSegue(withIdentifier: "unwindToHomeScreen", sender: self)
    }

    /// Creates the `RestaurantMO` from the form fields and persists it
    /// through the app delegate's Core Data context.
    func saveRestaruantData() {
        if let appDelegate = UIApplication.shared.delegate as? AppDelegate {
            restaurant = RestaurantMO(context: appDelegate.persistentContainer.viewContext)
            restaurant.name = nameTextField.text
            restaurant.location = locationTextField.text
            restaurant.type = typeTextField.text
            restaurant.isVisited = isVisited
            restaurant.phone = phoneTextField.text
            // BUGFIX: the original force-unwrapped `localImages!`, crashing
            // whenever the user saved without picking photos from the library.
            if let images = localImages {
                restaurant.images = images as NSObject
            }
            if let restaruantImage = photoImageView.image,
               let imageData = UIImagePNGRepresentation(restaruantImage) {
                restaurant.image = NSData(data: imageData)
            }
            print("Saving data to context ...")
            appDelegate.saveContext()
        }
    }

    /// Toggles the "been here" state and repaints the yes/no buttons.
    @IBAction func toggleBeenHereButton(_ sender: UIButton) {
        if sender == yesButton {
            isVisited = true
            yesButton.backgroundColor = UIColor.red
            noButton.backgroundColor = UIColor.lightGray
        } else if sender == noButton {
            isVisited = false
            yesButton.backgroundColor = UIColor.lightGray
            noButton.backgroundColor = UIColor.red
        }
    }

    // MARK: - Table view data source

    /// Row 0 opens the multi-select photo picker; the chosen images feed the
    /// banner carousel and the first image becomes the header photo.
    override func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
        tableView.deselectRow(at: indexPath, animated: true)
        if indexPath.row == 0 {
            let photoSelect = ZLPhotoActionSheet()
            photoSelect.maxSelectCount = 8
            photoSelect.maxPreviewCount = 20
            photoSelect.sender = self
            photoSelect.allowSelectLivePhoto = true
            photoSelect.selectImageBlock = { images, assets, isOriginal in
                print("")
                self.localImages = images
                self.bannerView.isHidden = false
                self.bannerView.dataSource = self
                self.bannerView.shouldLoop = true
                self.bannerView.autoScroll = true
                self.photoImageView.image = images[0]
            }
            photoSelect.showPhotoLibrary()
        }
    }

    // MARK: - UIImagePickerControllerDelegate

    /// Legacy single-image picker callback: installs the picked image into
    /// the header image view and pins it to all four edges of its superview.
    func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [String : Any]) {
        if let selectImage = info[UIImagePickerControllerOriginalImage] as? UIImage {
            photoImageView.image = selectImage
            photoImageView.contentMode = .scaleAspectFill
            photoImageView.clipsToBounds = true
        }
        let leadingConstraint = NSLayoutConstraint(item: photoImageView, attribute: NSLayoutAttribute.leading, relatedBy: NSLayoutRelation.equal, toItem: photoImageView.superview, attribute: NSLayoutAttribute.leading, multiplier: 1, constant: 0)
        leadingConstraint.isActive = true
        let trailingConstraint = NSLayoutConstraint(item: photoImageView, attribute: NSLayoutAttribute.trailing, relatedBy: NSLayoutRelation.equal,
                                                    toItem: photoImageView.superview, attribute: NSLayoutAttribute.trailing,
                                                    multiplier: 1, constant: 0)
        trailingConstraint.isActive = true
        let topConstraint = NSLayoutConstraint(item: photoImageView, attribute:
            NSLayoutAttribute.top, relatedBy: NSLayoutRelation.equal, toItem:
            photoImageView.superview, attribute: NSLayoutAttribute.top, multiplier: 1,
            constant: 0)
        topConstraint.isActive = true
        let bottomConstraint = NSLayoutConstraint(item: photoImageView, attribute:
            NSLayoutAttribute.bottom, relatedBy: NSLayoutRelation.equal, toItem:
            photoImageView.superview, attribute: NSLayoutAttribute.bottom, multiplier: 1,
            constant: 0)
        bottomConstraint.isActive = true
        dismiss(animated: true, completion: nil)
    }
}
// MARK: - ZYBannerViewDataSource
extension AddRestaurantController: ZYBannerViewDataSource {

    /// One banner page per picked image; zero when nothing was picked yet.
    func numberOfItems(inBanner banner: ZYBannerView!) -> Int {
        if let images = localImages {
            return images.count
        }
        return 0
    }

    /// Builds the page view shown for the image at `index`.
    func banner(_ banner: ZYBannerView!, viewForItemAt index: Int) -> UIView! {
        if let pageImage = localImages?[index] {
            let pageView = UIImageView(image: pageImage)
            pageView.contentMode = .scaleAspectFill
            pageView.layer.masksToBounds = true
            return pageView
        }
        return UIView()
    }
}
| c09dc61c42219dc66f6a874c831ffdd9640cf970 | [
"Markdown",
"Swift"
] | 7 | Markdown | KlayThompson/FoodPin | d4609b6168382bfb65eb6bb8e1f685d7d29d2828 | b65cfe49f1ae39431bdae1d21132014ac77b7c32 |
refs/heads/master | <repo_name>bthntr/MyDictionary<file_sep>/MyDictionary/Program.cs
using System;
using System.Collections.Generic;
namespace MyDictionary
{
class Program
{
static void Main(string[] args)
{
Dictionary<string, int> City = new Dictionary<string, int>();
City.Add("İstanbul", 34);
City.Add("Kocaeli", 41);
City.Add("Giresun", 28);
Console.WriteLine(City.Count);
}
}
}
| b455073dbefcd3e734f8fbb8b11b57d22618aeb2 | [
"C#"
] | 1 | C# | bthntr/MyDictionary | 621b810f5b8614f722c63fbafe55592742974241 | 780ac94140516a58573f9cc4fe3cbc46e4469ef1 |
refs/heads/master | <repo_name>thisisedyip/D02<file_sep>/HW02/HW02_ch03_ex03.py
#!/usr/bin/env python
# HW02_ch03_ex03
# This exercise can be done using only the statements and other features we
# have learned so far.
# (1) Write a function that draws a grid like the following:
# + - - - - + - - - - +
# | | |
# | | |
# | | |
# | | |
# + - - - - + - - - - +
# | | |
# | | |
# | | |
# | | |
# + - - - - + - - - - +
# Hint: to print more than one value on a line, you can print a
# comma-separated sequence of values:
# print('+', '-')
# By default, print advances to the next line, but you can
# override that behavior and put a space at the end, like this:
# print('+', end=' ')
# print('-')
# The output of these statements is '+ -'.
# A print statement with no argument ends the current line and
# goes to the next line.
# (2) Write a function that draws a similar grid with four rows and four columns.
################################################################################
# Write your functions below:
# Body
def do_twice(f, s):
f(s)
f(s)
def do_four(f, s):
do_twice(f, s)
do_twice(f, s)
def print_twice(s):
print(' ', end=s)
print(' ', end=s)
"""Constructing a line"""
def linestart(s): #start of line
print(end=s)
do_twice(print_twice, '-')
print(end=' ')
def lineend(s): #end of line
print(end=s)
print()
""" Constucting the beams"""
def beamstart(s): #start of beam
print(end=s)
do_twice(print_twice, ' ')
print(end=' ')
def beamend(s): #end of beam
print(end=s)
print()
def beamall2(s): #1 row of beams for 2x2
do_twice(beamstart, s)
beamend(s)
def beamall4(s): #1 row of beams for 4x4
do_four(beamstart, s)
beamend(s)
""" Constructing the 2x2 box """
def two_by_two():
"""top line"""
do_twice(linestart, '+')
lineend('+')
"""beams 1"""
do_four(beamall2, '|')
"""middle line"""
do_twice(linestart, '+')
lineend('+')
"""beams 2"""
do_four(beamall2, '|')
"""bottom line"""
do_twice(linestart, '+')
lineend('+')
def four_by_four():
"""top line"""
do_four(linestart, '+')
lineend('+')
"""beams 1"""
do_four(beamall4, '|')
"""bottom line 1"""
do_four(linestart, '+')
lineend('+')
"""beams 2"""
do_four(beamall4, '|')
"""bottom line 2"""
do_four(linestart, '+')
lineend('+')
"""beams 3"""
do_four(beamall4, '|')
"""bottom line 3"""
do_four(linestart, '+')
lineend('+')
"""beams 4"""
do_four(beamall4, '|')
"""bottom line 4"""
do_four(linestart, '+')
lineend('+')
# Write your functions above:
################################################################################
def main():
"""Call your functions within this function.
When complete have two function calls in this function:
two_by_two()
four_by_four()
"""
print("Hello World!")
two_by_two()
four_by_four()
if __name__ == "__main__":
main()
| ea4efeccb47b5aed7221053fba2d7fb20ad642fc | [
"Python"
] | 1 | Python | thisisedyip/D02 | 1862f02c54404a198e35633e7e66e231e7766b02 | 6787ad74ef03024c54f79756d732abf1572ac1db |
refs/heads/master | <file_sep>package gui;
import Objects.Demon;
import Objects.Heroe;
import enums.GameObjectType;
import enums.MoveAction;
import enums.MovingDirect;
import interfaces.map.PaintMap;
import interfaces.object.MoveObject;
import observer.MoveListener;
import javax.swing.*;
import javax.swing.border.BevelBorder;
import javax.swing.border.SoftBevelBorder;
import java.awt.*;
import java.awt.event.*;
import java.util.Random;
/**
 * Main gameplay window: renders the map, wires the movement buttons and
 * arrow keys to the hero, drives the monsters with a Swing timer, and
 * updates the score / turn-limit labels on every move notification.
 */
public class FrameGame extends FrameBaseChild implements MoveListener {
    private PaintMap map;
    private JButton btnleft;
    private JButton btndown;
    private JButton btnright;
    private JButton btnup;
    private JButton btnwait;
    private JButton btnmenu;
    private JButton btnsave;
    private JLabel jLabel1;
    private JLabel jLabel2;
    private JLabel jLabel3;
    private JLabel jLabel4;
    private JPanel jPanel1;
    private JPanel jPanel2;
    private JPanel jPanel3;
    private JSeparator jSeparator1;
    // Periodically moves the monsters in random directions.
    private MoveTimer timer = new MoveTimer();

    public FrameGame() {
        initComponents();
    }

    /**
     * Installs the map to play on: paints it, embeds its component into the
     * frame, initializes the score / limit labels, starts the monster timer
     * and subscribes this frame to move notifications.
     */
    public void setMap(PaintMap map) {
        this.map = map;
        map.paintMap();
        jPanel1.removeAll();
        jPanel1.add(map.getMap());
        jLabel3.setText(String.valueOf(Heroe.getScore()));
        jLabel4.setText(String.valueOf(map.getGameMap().getLimit()));
        timer.start();
        map.getGameMap().getGameCollection().addListener(this);
    }

    /** Builds the Swing component tree (mostly GUI-builder style layout code). */
    private void initComponents() {
        jSeparator1 = new JSeparator();
        jPanel1 = new JPanel();
        jPanel2 = new JPanel();
        btnup = new JButton();
        btnleft = new JButton();
        btndown = new JButton();
        btnright = new JButton();
        btnwait = new JButton();
        jPanel3 = new JPanel();
        btnsave = new JButton();
        btnmenu = new JButton();
        jLabel1 = new JLabel();
        jLabel2 = new JLabel();
        jLabel3 = new JLabel();
        jLabel4 = new JLabel();
        ButtonListener lis = new ButtonListener();
        jSeparator1.setBorder(BorderFactory.createEtchedBorder());
        jPanel1.setBorder(BorderFactory.createBevelBorder(BevelBorder.RAISED));
        jPanel1.setForeground(new Color(100, 100, 100));
        GroupLayout jPanel1Layout = new GroupLayout(jPanel1);
        jPanel1.setLayout(jPanel1Layout);
        jPanel1Layout.setHorizontalGroup(
            jPanel1Layout.createParallelGroup(GroupLayout.Alignment.LEADING)
                .addGap(0, 441, Short.MAX_VALUE)
        );
        jPanel1Layout.setVerticalGroup(
            jPanel1Layout.createParallelGroup(GroupLayout.Alignment.LEADING)
                .addGap(0, 0, Short.MAX_VALUE)
        );
        jPanel2.setBorder(new SoftBevelBorder(BevelBorder.LOWERED));
        btnup.setBackground(UIManager.getDefaults().getColor("info"));
        btnup.setText("В");
        btnup.addActionListener(lis);
        btnleft.setBackground(UIManager.getDefaults().getColor("info"));
        btnleft.setText("Л");
        btnleft.addActionListener(lis);
        btndown.setBackground(UIManager.getDefaults().getColor("info"));
        btndown.setText("Н");
        btndown.addActionListener(lis);
        btnright.setBackground(UIManager.getDefaults().getColor("info"));
        btnright.setText("П");
        btnright.addActionListener(lis);
        btnwait.setBackground(UIManager.getDefaults().getColor("info"));
        btnwait.setText("Пропустить");
        btnwait.addActionListener(lis);
        javax.swing.GroupLayout jPanel2Layout = new javax.swing.GroupLayout(jPanel2);
        jPanel2.setLayout(jPanel2Layout);
        jPanel2Layout.setHorizontalGroup(
            jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel2Layout.createSequentialGroup()
                    .addContainerGap()
                    .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
                        .addComponent(btnwait, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addGroup(jPanel2Layout.createSequentialGroup()
                            .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
                                .addComponent(btnup, javax.swing.GroupLayout.PREFERRED_SIZE, 40, javax.swing.GroupLayout.PREFERRED_SIZE)
                                .addGroup(jPanel2Layout.createSequentialGroup()
                                    .addComponent(btnleft, javax.swing.GroupLayout.PREFERRED_SIZE, 40, javax.swing.GroupLayout.PREFERRED_SIZE)
                                    .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                                    .addComponent(btndown, javax.swing.GroupLayout.PREFERRED_SIZE, 40, javax.swing.GroupLayout.PREFERRED_SIZE)))
                            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                            .addComponent(btnright, javax.swing.GroupLayout.PREFERRED_SIZE, 40, javax.swing.GroupLayout.PREFERRED_SIZE)))
                    .addContainerGap())
        );
        jPanel2Layout.setVerticalGroup(
            jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel2Layout.createSequentialGroup()
                    .addContainerGap(21, Short.MAX_VALUE)
                    .addComponent(btnup, javax.swing.GroupLayout.PREFERRED_SIZE, 30, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                    .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                        .addComponent(btnleft, javax.swing.GroupLayout.PREFERRED_SIZE, 30, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addComponent(btndown, javax.swing.GroupLayout.PREFERRED_SIZE, 30, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addComponent(btnright, javax.swing.GroupLayout.PREFERRED_SIZE, 30, javax.swing.GroupLayout.PREFERRED_SIZE))
                    .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                    .addComponent(btnwait)
                    .addContainerGap())
        );
        jPanel3.setBorder(new javax.swing.border.SoftBevelBorder(javax.swing.border.BevelBorder.LOWERED));
        btnsave.setBackground(javax.swing.UIManager.getDefaults().getColor("info"));
        btnsave.setText("Сохранить");
        btnsave.addActionListener(lis);
        btnmenu.setBackground(javax.swing.UIManager.getDefaults().getColor("info"));
        btnmenu.setText("Меню");
        btnmenu.addActionListener(lis);
        javax.swing.GroupLayout jPanel3Layout = new javax.swing.GroupLayout(jPanel3);
        jPanel3.setLayout(jPanel3Layout);
        jPanel3Layout.setHorizontalGroup(
            jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                .addGroup(jPanel3Layout.createSequentialGroup()
                    .addComponent(btnsave)
                    .addGap(0, 0, Short.MAX_VALUE))
                .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel3Layout.createSequentialGroup()
                    .addGap(0, 0, Short.MAX_VALUE)
                    .addComponent(btnmenu, javax.swing.GroupLayout.PREFERRED_SIZE, 94, javax.swing.GroupLayout.PREFERRED_SIZE))
        );
        jPanel3Layout.setVerticalGroup(
            jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel3Layout.createSequentialGroup()
                    .addComponent(btnmenu, javax.swing.GroupLayout.PREFERRED_SIZE, 40, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                    .addComponent(btnsave, javax.swing.GroupLayout.PREFERRED_SIZE, 40, javax.swing.GroupLayout.PREFERRED_SIZE))
        );
        jLabel1.setText("Счет:");
        jLabel2.setText("Осталось ходов:");
        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
        getContentPane().setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
                    .addContainerGap()
                    .addComponent(jPanel1, GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                    .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                    .addComponent(jSeparator1, javax.swing.GroupLayout.PREFERRED_SIZE, 17, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                    .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
                        .addGroup(layout.createSequentialGroup()
                            .addGap(10, 10, 10)
                            .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                                .addComponent(jLabel2)
                                .addComponent(jLabel1))
                            .addGap(18, 18, 18)
                            .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
                                .addComponent(jLabel3, javax.swing.GroupLayout.DEFAULT_SIZE, 30, Short.MAX_VALUE)
                                .addComponent(jLabel4, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
                            .addGap(0, 0, Short.MAX_VALUE))
                        .addComponent(jPanel2, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addComponent(jPanel3, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
                    .addContainerGap())
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                .addComponent(jSeparator1)
                .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
                    .addContainerGap()
                    .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
                        .addComponent(jPanel1, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addGroup(layout.createSequentialGroup()
                            .addComponent(jPanel2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                            .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                                .addComponent(jLabel1)
                                .addComponent(jLabel3))
                            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                            .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                                .addComponent(jLabel2)
                                .addComponent(jLabel4))
                            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 10, Short.MAX_VALUE)
                            .addComponent(jPanel3, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)))
                    .addContainerGap())
        );
        setSize(616, 413);
        // Arrow keys mirror the direction buttons.
        addKeyListener(new KeyAdapter() {
            @Override
            public void keyPressed(KeyEvent e) {
                if (e.getKeyCode() == KeyEvent.VK_UP) moving(MovingDirect.UP, GameObjectType.HEROE);
                if (e.getKeyCode() == KeyEvent.VK_DOWN) moving(MovingDirect.DOWN, GameObjectType.HEROE);
                if (e.getKeyCode() == KeyEvent.VK_LEFT) moving(MovingDirect.LEFT, GameObjectType.HEROE);
                if (e.getKeyCode() == KeyEvent.VK_RIGHT) moving(MovingDirect.RIGHT, GameObjectType.HEROE);
            }
        });
    }

    /** Routes button clicks to hero movement and the menu/save actions. */
    private class ButtonListener implements ActionListener {
        @Override
        public void actionPerformed(ActionEvent e) {
            if (e.getSource() == btnleft) moving(MovingDirect.LEFT, GameObjectType.HEROE);
            if (e.getSource() == btnright) moving(MovingDirect.RIGHT, GameObjectType.HEROE);
            if (e.getSource() == btnup) moving(MovingDirect.UP, GameObjectType.HEROE);
            if (e.getSource() == btndown) moving(MovingDirect.DOWN, GameObjectType.HEROE);
            if (e.getSource() == btnwait) {
                // TODO: skipping a turn is not implemented yet.
            }
            // BUGFIX: in the original, the btnwait "if" had no body, so this
            // btnmenu check became its nested statement and the Menu button
            // could never close the frame.
            if (e.getSource() == btnmenu) closeFrame();
            if (e.getSource() == btnsave) {
                // TODO: saving is not implemented yet.
            }
        }
    }

    /** Asks the game collection to move all objects of {@code type} in {@code direct}. */
    private void moving(MovingDirect direct, GameObjectType type) {
        map.getGameMap().getGameCollection().moveObject(direct, type);
    }

    /**
     * Move-notification callback: handles game over (hero died or the turn
     * limit ran out), refreshes the score on collect, and repaints the map.
     */
    @Override
    public void notifyAction(MoveAction action, Heroe heroe) {
        if (action == MoveAction.DIE || map.getGameMap().getLimit() == 0) {
            timer.stop();
            JOptionPane.showConfirmDialog(null, "Поражение", "Вы проиграли!", JOptionPane.PLAIN_MESSAGE);
            closeFrame();
        }
        if (action == MoveAction.COLLECT) {
            jLabel3.setText(String.valueOf(Heroe.getScore()));
        }
        jLabel4.setText(String.valueOf(map.getGameMap().getLimit()));
        map.paintMap();
    }

    /** Swing timer that moves every monster in a random direction twice a second. */
    private class MoveTimer implements ActionListener {
        private javax.swing.Timer time;

        private MoveTimer() {
            time = new javax.swing.Timer(500, this);
            time.setInitialDelay(0);
        }

        public void start() {
            time.start();
        }

        public void stop() {
            time.stop();
        }

        @Override
        public void actionPerformed(ActionEvent e) {
            map.getGameMap().getGameCollection().moveObject(randomDirect(), GameObjectType.MONSTER);
        }

        /** Picks one of the four movement directions uniformly at random. */
        private MovingDirect randomDirect() {
            Random r = new Random();
            MovingDirect dir = null;
            switch (r.nextInt(4)) {
                case 0: {
                    dir = MovingDirect.UP;
                    break;
                }
                case 1: {
                    dir = MovingDirect.DOWN;
                    break;
                }
                case 2: {
                    dir = MovingDirect.LEFT;
                    break;
                }
                case 3: {
                    dir = MovingDirect.RIGHT;
                    break;
                }
            }
            return dir;
        }
    }
}
<file_sep>package observer;
import interfaces.map.GameCollection;
import java.util.ArrayList;
import java.util.List;
/**
* Created by Admin on 18.02.2016.
*/
/**
 * Base class for game collections that publish move notifications.
 * Maintains the subscriber list; concrete subclasses supply the actual
 * collection behavior declared by {@link GameCollection}.
 */
public abstract class MapListener implements GameCollection {

    /** Subscribers notified after every move. */
    private final List<MoveListener> subscribers = new ArrayList<>();

    @Override
    public List<MoveListener> getListener() {
        return subscribers;
    }

    @Override
    public void addListener(MoveListener listener) {
        subscribers.add(listener);
    }

    @Override
    public void removeListener(MoveListener listener) {
        subscribers.remove(listener);
    }

    @Override
    public void removeAll() {
        subscribers.clear();
    }
}
<file_sep>package gui;
import javax.swing.*;
import javax.swing.table.DefaultTableModel;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
// Statistics window: shows a read-only table of players (name, score,
// games played, wins) with "Menu" and "Clear" buttons. Most of the body
// is GUI-builder-generated GroupLayout code; statement order matters.
public class FrameStatistic extends FrameBaseChild {
private JButton btnmenu;
private JButton btnreset;
private JPanel jPanel1;
private JScrollPane jScrollPane2;
private JTable jTable2;
public FrameStatistic() {
initComponents();
}
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">
// Builds the component tree: a statistics table on top, a button panel below.
private void initComponents() {
jPanel1 = new JPanel();
btnmenu = new JButton();
btnreset = new JButton();
jScrollPane2 = new JScrollPane();
jTable2 = new JTable();
ButtonListener listener = new ButtonListener();
setTitle("Джастата");
btnmenu.setText("Меню");
btnmenu.addActionListener(listener);
btnreset.setText("Очистить");
btnreset.addActionListener(listener);
GroupLayout jPanel1Layout = new GroupLayout(jPanel1);
jPanel1.setLayout(jPanel1Layout);
jPanel1Layout.setHorizontalGroup(
jPanel1Layout.createParallelGroup(GroupLayout.Alignment.LEADING)
.addGroup(jPanel1Layout.createSequentialGroup()
.addContainerGap()
.addComponent(btnmenu, GroupLayout.PREFERRED_SIZE, 100, GroupLayout.PREFERRED_SIZE)
.addPreferredGap(LayoutStyle.ComponentPlacement.RELATED, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(btnreset, GroupLayout.PREFERRED_SIZE, 100, GroupLayout.PREFERRED_SIZE)
.addContainerGap())
);
jPanel1Layout.setVerticalGroup(
jPanel1Layout.createParallelGroup(GroupLayout.Alignment.LEADING)
.addGroup(jPanel1Layout.createSequentialGroup()
.addContainerGap()
.addGroup(jPanel1Layout.createParallelGroup(GroupLayout.Alignment.BASELINE)
.addComponent(btnmenu, GroupLayout.PREFERRED_SIZE, 40, GroupLayout.PREFERRED_SIZE)
.addComponent(btnreset, GroupLayout.PREFERRED_SIZE, 40, GroupLayout.PREFERRED_SIZE))
.addContainerGap(GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
);
jTable2.setBorder(BorderFactory.createLineBorder(new Color(204, 255, 204)));
// 14 empty rows; real data population is not implemented in this view yet.
jTable2.setModel(new DefaultTableModel(
new Object [][] {
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null}
},
new String [] {
"Имя :", "Счет :", "Сыграно всего:", "Удачных игр:"
}
) {
// Per-column classes so the table renders/edits each column appropriately.
Class[] types = new Class [] {
String.class, Long.class, Integer.class, Integer.class
};
public Class getColumnClass(int columnIndex) {
return types [columnIndex];
}
});
jScrollPane2.setViewportView(jTable2);
GroupLayout layout = new GroupLayout(getContentPane());
getContentPane().setLayout(layout);
layout.setHorizontalGroup(
layout.createParallelGroup(GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addGroup(layout.createParallelGroup(GroupLayout.Alignment.LEADING)
.addComponent(jScrollPane2, GroupLayout.DEFAULT_SIZE, 482, Short.MAX_VALUE)
.addComponent(jPanel1, GroupLayout.Alignment.TRAILING, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
.addContainerGap())
);
layout.setVerticalGroup(
layout.createParallelGroup(GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addComponent(jScrollPane2, GroupLayout.PREFERRED_SIZE, 251, GroupLayout.PREFERRED_SIZE)
.addPreferredGap(LayoutStyle.ComponentPlacement.RELATED, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(jPanel1, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)
.addContainerGap())
);
pack();
}
// Handles the two buttons: "Menu" closes the frame; "Clear" is a stub.
private class ButtonListener implements ActionListener {
@Override
public void actionPerformed(ActionEvent e) {
if (e.getSource() == btnmenu) closeFrame();
else if (e.getSource() == btnreset) {
// NOTE(review): clearing the statistics is not implemented yet.
}
}
}
}
<file_sep>package enums;
import Objects.Treashure;
/**
* Created by Admin on 11.02.2016.
*/
/**
 * Kinds of collectable treasure and the score value each one is worth.
 */
public enum TreashureType {
    GOLD(5),
    SILVER(3),
    Cooper(1); // NOTE(review): likely a typo for COPPER; renaming would break serialized data and callers.

    /** Score awarded for collecting this treasure type. */
    private final int equil;

    TreashureType(int equil) {
        this.equil = equil;
    }

    public int getEquil() {
        return equil;
    }
}
<file_sep>package Abstracts;
import Objects.Coordinate;
import enums.GameObjectType;
import interfaces.map.GameCollection;
import interfaces.object.StaticObject;
import javax.swing.*;
/**
 * Base class for every object placed on the game map. Holds the object's
 * type, its current map coordinate and the icon used to render it.
 * Equality and hashing are based on type + coordinate.
 */
public abstract class AbstractGameObject implements StaticObject {
    private GameObjectType type;
    private Coordinate coordinate;
    private ImageIcon icon = getImageIcon("");

    public AbstractGameObject() {
    }

    @Override
    public ImageIcon getIcon() {
        return icon;
    }

    public void setIcon(ImageIcon icon) {
        this.icon = icon;
    }

    /** Loads an icon from the classpath, relative to the concrete subclass. */
    protected ImageIcon getImageIcon(String path) {
        return new ImageIcon(getClass().getResource(path));
    }

    @Override
    public GameObjectType getType() {
        return type;
    }

    public void setType(GameObjectType type) {
        this.type = type;
    }

    public Coordinate getCoordinate() {
        return coordinate;
    }

    public void setCoordinate(Coordinate coordinate) {
        this.coordinate = coordinate;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        AbstractGameObject that = (AbstractGameObject) o;
        if (type != that.type) return false;
        // BUGFIX: null-safe comparison — coordinate may not be set yet.
        // Fully qualified to avoid a clash with the project package "Objects".
        return java.util.Objects.equals(coordinate, that.coordinate);
    }

    @Override
    public int hashCode() {
        // BUGFIX: the original dereferenced possibly-null type/coordinate.
        return java.util.Objects.hash(type, coordinate);
    }

    @Override
    public String toString() {
        // Fixed typo in the class name ("AbstractGameObjct").
        return "AbstractGameObject{" +
                "type=" + type +
                ", coordinate=" + coordinate +
                '}';
    }
}
<file_sep>package enums;
import java.io.Serializable;
/**
* Created by Admin on 25.01.2016.
*/
/**
 * All object kinds that can occupy a map cell. The priority decides which
 * object wins when several share one cell (see the map painters).
 */
public enum GameObjectType implements Serializable {
    MONSTER(5),
    TREASURE(4),
    EXIT(3),
    HEROE(2),
    WALL(1),
    NOTHING(-1);

    /** Display/conflict priority; higher values take precedence. */
    private final int priority;

    GameObjectType(int priority) {
        this.priority = priority;
    }

    public int getPriority() {
        return priority;
    }
}
<file_sep>package gui.map;
import Abstracts.AbstractGameMap;
import Abstracts.AbstractGameObject;
import Objects.Coordinate;
import collection.MapCollection;
import Objects.Nothing;
import Objects.Wall;
import creator.MapCreator;
import enums.GameObjectType;
import enums.MapSource;
import enums.MovingDirect;
import interfaces.map.PaintMap;
import javax.swing.*;
import javax.swing.table.DefaultTableModel;
import javax.swing.table.TableColumn;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.Random;
/**
 * Renders the game map as a JTable: each cell holds the highest-priority
 * game object at that coordinate, drawn via an {@code ImageRenderer}.
 */
public class JTableMap implements PaintMap {
    private JTable table = new JTable();
    private AbstractGameMap gamemap;
    private String[] columnName;
    // One object per cell, resolved by priority when several overlap.
    private AbstractGameObject[][] mapObjects;

    public JTableMap(MapSource type, Object source, MapCollection collection) {
        // Static display table: no selection, no editing, no grid lines.
        table.setEnabled(false);
        table.setSize(400, 400);
        table.setShowHorizontalLines(false);
        table.setShowVerticalLines(false);
        table.setTableHeader(null);
        table.setRowHeight(30);
        table.setRowSelectionAllowed(false);
        gamemap = MapCreator.getJmap().createMap(type, collection);
        gamemap.loadMap(source);
    }

    /**
     * Rebuilds {@link #mapObjects}: fills the grid with {@code Nothing}
     * placeholders, then places every real object, letting the map decide
     * by priority when two objects share a cell.
     */
    public void addMapObject() {
        mapObjects = new AbstractGameObject[gamemap.getHeight()][gamemap.getWight()];
        for (int y = 0; y < gamemap.getHeight(); y++) {
            for (int x = 0; x < gamemap.getWight(); x++) {
                mapObjects[y][x] = new Nothing(new Coordinate(x, y));
            }
        }
        for (AbstractGameObject gameObj : gamemap.getGameCollection().getGameObject().values()) {
            if (!gameObj.getType().equals(GameObjectType.NOTHING)) {
                int x = gameObj.getCoordinate().getX();
                int y = gameObj.getCoordinate().getY();
                // BUGFIX: was the non-short-circuit bitwise '&'; use logical '&&'.
                if (!(mapObjects[y][x] instanceof Nothing) && !(mapObjects[y][x] instanceof Wall)) {
                    AbstractGameObject temp = mapObjects[y][x];
                    mapObjects[y][x] = gamemap.getObjectPriority(temp, gameObj);
                } else {
                    mapObjects[y][x] = gameObj;
                }
            }
        }
    }

    /** Repaints the table model from the current collection state. */
    @Override
    public boolean paintMap() {
        addMapObject();
        columnName = new String[gamemap.getWight()];
        for (int i = 0; i < columnName.length; i++) {
            columnName[i] = "";
        }
        table.setModel(new DefaultTableModel(mapObjects, columnName));
        for (int i = 0; i < table.getColumnCount(); i++) {
            table.getColumnModel().getColumn(i).setCellRenderer(new ImageRenderer());
            TableColumn a = table.getColumnModel().getColumn(i);
            a.setPreferredWidth(26);
        }
        return true;
    }

    @Override
    public Component getMap() {
        return table;
    }

    @Override
    public AbstractGameMap getGameMap() {
        paintMap();
        return gamemap;
    }
}
<file_sep>package collection;
import Abstracts.AbstractGameObject;
import Abstracts.AbstractMoveObject;
import Objects.Coordinate;
import enums.GameObjectType;
import enums.MoveAction;
import enums.MovingDirect;
import Objects.Heroe;
import Objects.Nothing;
import observer.MapListener;
import observer.MoveListener;
import java.util.*;
public class MapCollection extends MapListener {
private HashMap<Coordinate, AbstractGameObject> gameObject = new HashMap();
private EnumMap<GameObjectType, ArrayList<AbstractGameObject>> objectType = new EnumMap(GameObjectType.class);
@Override
public void addGameObject(AbstractGameObject newObject) {
ArrayList<AbstractGameObject> temp = objectType.get(newObject.getType());
if (temp == null) {
temp = new ArrayList();
}
temp.add(newObject);
gameObject.put(newObject.getCoordinate(), newObject);
objectType.put(newObject.getType(), temp);
}
@Override
public ArrayList<AbstractGameObject> getGameObject(GameObjectType type) {
return objectType.get(type);
}
@Override
public AbstractGameObject getObjectByCoordinate(Coordinate coordinate) {
return gameObject.get(coordinate);
}
@Override
public AbstractGameObject getObjectByCoordinate(int x, int y) {
return gameObject.get(new Coordinate(x, y));
}
@Override
public HashMap<Coordinate, AbstractGameObject> getGameObject() {
return gameObject;
}
@Override
public void moveObject(MovingDirect direct, GameObjectType type) {
MoveAction action = null;
Heroe heroe = (Heroe)getGameObject(GameObjectType.HEROE).get(0);
for (AbstractGameObject gameObject : this.getGameObject(type)) {
if (gameObject instanceof AbstractMoveObject) {
AbstractGameObject secondGameObject = getObjectByCoordinate(getNewCoordinate(direct,gameObject));
switch (((AbstractMoveObject) gameObject).move(secondGameObject)){
case DIE:
{
action = MoveAction.DIE;
break;
}
case COLLECT:{
swapObject(gameObject,new Nothing(getNewCoordinate(direct,gameObject)));
break;
}
case MOVING:{
swapObject(gameObject,secondGameObject);
break;
}
case EXIT:{
}
case STAY: {
action = MoveAction.STAY;
break;
}
}
((AbstractMoveObject) gameObject).changeIcon(direct,type);
}
}
notifyListener(action,heroe);
}
private void swapObject(AbstractGameObject firstObject, AbstractGameObject secondObject) {
Coordinate coordinate = firstObject.getCoordinate();
firstObject.setCoordinate(secondObject.getCoordinate());
secondObject.setCoordinate(coordinate);
gameObject.put(firstObject.getCoordinate(),firstObject);
gameObject.put(secondObject.getCoordinate(),secondObject);
}
/**
 * Computes the coordinate of the cell adjacent to the given object in the
 * given direction (UP decreases y, DOWN increases y, LEFT decreases x,
 * RIGHT increases x). The object itself is not modified.
 */
public Coordinate getNewCoordinate(MovingDirect direct, AbstractGameObject object) {
	int targetX = object.getCoordinate().getX();
	int targetY = object.getCoordinate().getY();
	if (direct == MovingDirect.UP) {
		targetY = targetY - 1;
	} else if (direct == MovingDirect.DOWN) {
		targetY = targetY + 1;
	} else if (direct == MovingDirect.LEFT) {
		targetX = targetX - 1;
	} else if (direct == MovingDirect.RIGHT) {
		targetX = targetX + 1;
	}
	return new Coordinate(targetX, targetY);
}
/**
 * Broadcasts the outcome of the last move to every registered listener.
 *
 * @param action the resulting action, may be null for a plain move
 * @param heroe  the heroe instance the action refers to
 */
@Override
public void notifyListener(MoveAction action, Heroe heroe) {
	for (MoveListener registered : getListener()) {
		registered.notifyAction(action, heroe);
	}
}
}
<file_sep>package Objects;
import Abstracts.AbstractGameObject;
import Abstracts.AbstractMoveObject;
import enums.GameObjectType;
import enums.MoveAction;
import enums.MovingDirect;
/**
* Created by Admin on 27.01.2016.
*/
/**
 * The player figure. The turn counter and the score are static, i.e. shared
 * across the whole game rather than per instance.
 */
public class Heroe extends AbstractMoveObject {

	// Global counters: 'tern' counts moves, 'score' the collected points.
	private static int tern = 0;
	private static int score = 0;

	public Heroe(Coordinate coordinate) {
		super.setCoordinate(coordinate);
		super.setType(GameObjectType.HEROE);
		changeIcon(MovingDirect.UP, GameObjectType.HEROE);
	}

	/**
	 * Decides what happens when the heroe tries to enter the cell occupied
	 * by the given object.
	 *
	 * @param obj the object on the target cell, or null
	 * @return MOVING for an empty cell, DIE for a monster, COLLECT for a
	 *         treasure (also adds its experience to the score), EXIT for the
	 *         exit, STAY otherwise
	 */
	@Override
	public MoveAction move(AbstractGameObject obj) {
		MoveAction moveResult = null;
		if (obj == null) {
			return MoveAction.STAY;
		}
		switch (obj.getType()) {
		case NOTHING: {
			moveResult = MoveAction.MOVING;
			break;
		}
		case MONSTER: {
			moveResult = MoveAction.DIE;
			break;
		}
		case TREASURE: {
			moveResult = MoveAction.COLLECT;
			setScore(((Treashure) obj).getExp());
			break;
		}
		case EXIT: {
			moveResult = MoveAction.EXIT;
			break;
		}
		case WALL: {
			moveResult = MoveAction.STAY;
			break;
		}
		default:
			moveResult = MoveAction.STAY;
		}
		return moveResult;
	}

	public static int getTern() {
		return tern;
	}

	public static int getScore() {
		return score;
	}

	/**
	 * Adds the given amount to the global score. Note: despite the name this
	 * accumulates rather than overwrites.
	 */
	public void setScore(int score) {
		// Fix: the field is static and the parameter shadows it; 'this.score'
		// obscured that. Qualify the static field explicitly.
		Heroe.score += score;
	}

	/** Adds the given amount to the global turn counter (accumulating). */
	public void setTern(int tern) {
		Heroe.tern += tern;
	}
}
<file_sep>package creator;
import Abstracts.AbstractGameMap;
import Abstracts.AbstractGameObject;
import collection.MapCollection;
import enums.MapSource;
import mapsources.FSMap;
/**
* Created by Admin on 05.02.2016.
*/
/**
 * Singleton factory that builds a concrete game map for a given map source.
 */
public class MapCreator {

	private static MapCreator jmap;

	/** Lazily creates and returns the single MapCreator instance. */
	public static MapCreator getJmap() {
		if (jmap != null) {
			return jmap;
		}
		jmap = new MapCreator();
		return jmap;
	}

	/**
	 * Creates a map backed by the given collection.
	 *
	 * @return a filesystem-based map for MapSource.FS, otherwise null
	 */
	public AbstractGameMap createMap(MapSource type, MapCollection collection) {
		switch (type) {
		case FS:
			return new FSMap(collection);
		default:
			return null;
		}
	}
}
<file_sep>package enums;
/**
* Created by Admin on 14.02.2016.
*/
/**
 * Possible outcomes of a move attempt; produced by Heroe#move and consumed
 * by the collection's moveObject / listener notification.
 */
public enum MoveAction {
	MOVING,   // target cell is empty: the object moves there
	DIE,      // the heroe ran into a monster
	COLLECT,  // a treasure is picked up and removed from the map
	STAY,     // move blocked (wall, or no target object)
	EXIT      // the exit cell was reached
}
<file_sep>package Abstracts;
import interfaces.map.GameCollection;
import interfaces.map.GameMap;
import java.io.Serializable;
/**
* Created by Admin on 02.02.2016.
*/
/**
 * Base class for every game map: holds the backing object collection, the
 * map dimensions/limit, and the flags that make a map playable.
 */
public abstract class AbstractGameMap implements GameMap, Serializable {

	private static final long serialVersionUID = 1L;

	private GameCollection gameCollection;
	// NOTE(review): field names keep their original (non-standard)
	// capitalisation because this class is Serializable and renaming them
	// would change the default serialized form.
	private String Name;
	private int Height;
	private int Wight;
	private int Limit;
	private boolean isExit;
	private boolean isHeroe;

	public AbstractGameMap(GameCollection gameCollection) {
		this.gameCollection = gameCollection;
	}

	public GameCollection getGameCollection() {
		return this.gameCollection;
	}

	public void setName(String name) {
		this.Name = name;
	}

	@Override
	public int getHeight() {
		return this.Height;
	}

	public void setHeight(int height) {
		this.Height = height;
	}

	// 'Wight' is presumably a misspelling of 'width'; the accessor name is
	// fixed by the GameMap interface, so it is kept as-is.
	@Override
	public int getWight() {
		return this.Wight;
	}

	public void setWight(int wight) {
		this.Wight = wight;
	}

	@Override
	public int getLimit() {
		return this.Limit;
	}

	public void setLimit(int limit) {
		this.Limit = limit;
	}

	public boolean isExit() {
		return this.isExit;
	}

	public void setExit(boolean exit) {
		this.isExit = exit;
	}

	public boolean isHeroe() {
		return this.isHeroe;
	}

	public void setHeroe(boolean heroe) {
		this.isHeroe = heroe;
	}

	/** Returns the object whose type has the higher priority; b wins ties. */
	public AbstractGameObject getObjectPriority(AbstractGameObject a, AbstractGameObject b) {
		if (a.getType().getPriority() > b.getType().getPriority()) {
			return a;
		}
		return b;
	}

	/** A map can be played only when it contains both an exit and a heroe. */
	public boolean isValidMap() {
		return this.isExit && this.isHeroe;
	}
}
<file_sep>package Objects;
import Abstracts.AbstractGameObject;
import enums.GameObjectType;
/**
 * The exit cell of a map; Heroe#move yields MoveAction.EXIT when the heroe
 * steps onto it.
 */
public class Exit extends AbstractGameObject {
	public Exit(Coordinate coordinate) {
		setType(GameObjectType.EXIT);
		// Icon path is resolved relative to the code location.
		setIcon(getImageIcon("../res/exit.png"));
		setCoordinate(coordinate);
	}
}
<file_sep>package gui;
import javax.swing.*;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
/**
* Created by Admin on 03.02.2016.
*/
/**
 * Base class for child windows: a child shows itself in place of a parent
 * frame and restores the parent when it is closed.
 */
public abstract class FrameBaseChild extends JFrame {

	// Set by openFrame(); restored by closeFrame().
	private JFrame parentFrame;

	FrameBaseChild() {
		setCloseOperation();
	}

	/**
	 * Hides the given parent frame and shows this frame instead.
	 *
	 * @param parent the frame to restore later in closeFrame()
	 */
	protected void openFrame(JFrame parent) {
		parentFrame = parent;
		parent.setVisible(false);
		// Fix: configure the window *before* showing it. The original called
		// setVisible(true) first, so the frame briefly appeared at the wrong
		// position before jumping to the screen center.
		super.setLocationRelativeTo(null);
		super.setResizable(false);
		super.setVisible(true);
		super.requestFocusInWindow();
	}

	/** Hides this frame and shows the parent again. */
	protected void closeFrame() {
		if (parentFrame == null) {
			// Fix: was thrown with an empty message, which made the error
			// impossible to diagnose. Exception type kept for compatibility.
			throw new IllegalArgumentException(
					"closeFrame() called before openFrame(): no parent frame is set");
		}
		parentFrame.setVisible(true);
		super.setVisible(false);
	}

	/**
	 * Replaces the default close behaviour: closing the window returns to the
	 * parent frame instead of disposing the application.
	 */
	protected void setCloseOperation() {
		super.setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE);
		super.addWindowListener(new WindowAdapter() {
			@Override
			public void windowClosing(WindowEvent e) {
				closeFrame();
			}
		});
	}
}
| aee672922ca56e1650268f721b5a71dde91a2e81 | [
"Java"
] | 14 | Java | Velheor86/TreasureHanters | 05f22a6855b58510b0137948a65e71e8e8d02b80 | 5600f17deb5b28a1681906b9d6ef8f06bd699c05 |
refs/heads/master | <file_sep>#include <opencv2/opencv.hpp>
#include <iostream>
using namespace cv;
using namespace std;
int main()
{
Mat Original,ImgGray;
Original = imread("C:/Users/ksrnd/Desktop/Light/good/20190326_002.png");
cvtColor(Original,ImgGray,CV_BGR2GRAY);
medianBlur(ImgGray, ImgGray, 5);
Mat CannyFilter;
GaussianBlur(ImgGray, ImgGray, Size(9, 9), 2, 2);
Canny(ImgGray, CannyFilter, 10, 30, 3);
// imshow("CannyFilter",CannyFilter);
vector<Vec3f> circles;
int min_radius = 10;
int max_radius = 59;
HoughCircles(CannyFilter, circles, HOUGH_GRADIENT, 2,
CannyFilter.rows/16, // change this value to detect circles with different distances to each other
10, 90,
min_radius, max_radius // (min_radius & max_radius) to detect larger circles
);
Mat Roi;
Point center;
for( size_t i = 0; i < circles.size(); i++ )
{
Vec3i c = circles[i];
center = Point(c[0], c[1]);
// circle center
circle( Original, center, 1, Scalar(0,100,100), 3, LINE_AA);
Rect r(center.x-max_radius, center.y-max_radius,max_radius*2,max_radius*2);
Roi= Original(Rect(Point( center.x-max_radius, center.y-max_radius ) ,Point(center.x+max_radius+10, center.y+max_radius+10 )));
// circle outline
int radius = c[2];
circle( Original, center, radius, Scalar(255,0,255), 3, LINE_AA);
}
cout << Roi.size() << endl;
cout << Original.size() << endl;
Mat result;
result = Roi - Roi;
imshow("result",result);
Mat imageROI = Original(Rect(center.x-max_radius, center.y-max_radius, result.cols, result.rows));
addWeighted(result, 0.0, imageROI, 0.0, 0., imageROI);
imshow("imageROI",imageROI);
imshow("original",Original);
imshow("Roi",Roi);
waitKey();
}
| 7f014183bf534ccd6dfe9cf2e16f391eb959e6f5 | [
"C++"
] | 1 | C++ | jack7141/HoughCircle | b30d8b8858813de85bbdec892da8e3d42d410016 | 7296269c51c7c66060e40cab6925120f88f8ddea |
refs/heads/master | <repo_name>ibmendoza/learn-go<file_sep>/chapter18/point-new/point-new.go
package main
import "fmt"
// point with a latitude, longitude.
type point struct {
lat, long float64
}
// coordinate in degrees, minutes, seconds in a N/S/E/W hemisphere.
type coordinate struct {
d, m, s float64
h rune
}
// newPoint from latitude, longitude d/m/s coordinates.
func newPoint(lat, long coordinate) point {
return point{lat.decimal(), long.decimal()}
}
// decimal converts a d/m/s coordinate to decimal degrees.
func (c coordinate) decimal() float64 {
sign := 1.0
switch c.h {
case 'S', 'W', 's', 'w':
sign = -1
}
return sign * (c.d + c.m/60 + c.s/3600)
}
func main() {
curiosity := newPoint(coordinate{4, 35, 22.2, 'S'}, coordinate{137, 26, 30.12, 'E'})
fmt.Println(curiosity)
}
<file_sep>/solutions/chapter18/landing/landing.go
package main
import "fmt"
// point with a latitude, longitude.
type point struct {
lat, long float64
}
// coordinate in degrees, minutes, seconds in a N/S/E/W hemisphere.
type coordinate struct {
d, m, s float64
h rune
}
// newPoint from latitude, longitude d/m/s coordinates.
func newPoint(lat, long coordinate) point {
return point{lat.decimal(), long.decimal()}
}
// decimal converts a d/m/s coordinate to decimal degrees.
func (c coordinate) decimal() float64 {
sign := 1.0
switch c.h {
case 'S', 'W', 's', 'w':
sign = -1
}
return sign * (c.d + c.m/60 + c.s/3600)
}
func main() {
spirit := newPoint(coordinate{14, 34, 6.2, 'S'}, coordinate{175, 28, 21.5, 'E'})
opportunity := newPoint(coordinate{1, 56, 46.3, 'S'}, coordinate{354, 28, 24.2, 'E'})
curiosity := newPoint(coordinate{4, 35, 22.2, 'S'}, coordinate{137, 26, 30.12, 'E'})
insight := newPoint(coordinate{3, 0, 0.0, 'N'}, coordinate{154, 41, 60.0, 'E'})
fmt.Println("Spirit", spirit)
fmt.Println("Opportunity", opportunity)
fmt.Println("Curiosity", curiosity)
fmt.Println("InSight", insight)
}
<file_sep>/solutions/chapter17/point/point.go
package main
import "fmt"
func main() {
type point struct {
lat float64
long float64
}
var spirit point
spirit.lat = -14.5684
spirit.long = 175.472636
var opportunity point
opportunity.lat = -1.9462
opportunity.long = 354.4734
var curiosity point
curiosity.lat = -4.5895
curiosity.long = 137.4417
fmt.Println(spirit, opportunity, curiosity)
}
<file_sep>/chapter17/point/point.go
package main
import "fmt"
func main() {
type point struct {
lat float64
long float64
}
var spirit point
spirit.lat = -14.5684
spirit.long = 175.472636
var opportunity point
opportunity.lat = -1.9462
opportunity.long = 354.4734
fmt.Println(spirit, opportunity)
}
<file_sep>/solutions/chapter17/data-point/data-point.go
package main
import (
"encoding/json"
"fmt"
"os"
)
func main() {
type point struct {
Lat float64 `json:"latitude"`
Long float64 `json:"longitude"`
}
type Celsius float64
type dataPoint struct {
Sol int `json:"sol"`
Location point `json:"location"`
Temperature Celsius `json:"temperature"`
}
curiosity := point{-4.5895, 137.4417}
data := dataPoint{Sol: 0, Location: curiosity, Temperature: -33.0}
bytes, err := json.Marshal(data)
exitOnError(err)
fmt.Println(string(bytes))
}
// exitOnError prints any errors and exits
func exitOnError(err error) {
if err != nil {
fmt.Println(err)
os.Exit(1)
}
}
| 5cc506a0fffcd6778a13e42b2aa9564661b60454 | [
"Go"
] | 5 | Go | ibmendoza/learn-go | af1194dd264bb390980cc31368e479b13ee6361b | 999a064db38d93ab7232fa2074725cf96f47a3a8 |
refs/heads/master | <repo_name>frostyandy2k/OntoMoPP<file_sep>/Core/org.emftext.language.owl.test/src/org/emftext/language/owl/test/OWLTest.java
/*******************************************************************************
* Copyright (c) 2006-2012
* Software Technology Group, Dresden University of Technology
* DevBoost GmbH, Berlin, Amtsgericht Charlottenburg, HRB 140026
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Software Technology Group - TU Dresden, Germany;
* DevBoost GmbH - Berlin, Germany
* - initial API and implementation
******************************************************************************/
package org.emftext.language.owl.test;
import static org.junit.Assert.*;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import org.eclipse.emf.common.util.URI;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.resource.Resource;
import org.eclipse.emf.ecore.resource.Resource.Diagnostic;
import org.eclipse.emf.ecore.resource.ResourceSet;
import org.eclipse.emf.ecore.resource.impl.ResourceSetImpl;
import org.emftext.language.owl.resource.owl.mopp.OwlMetaInformation;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
public class OWLTest {
@Before
public void setUp() {
registerResourceFactories();
}
@Test
@Ignore
public void testRegularExpressions() {
// TODO enable this test
// Matcher matcher = OwlScannerlessParser.TOKEN_IRI.matcher("Class:");
// assertFalse(matcher.matches());
}
@Test
public void testSimpleOntologyParsing() {
parseString("Ontology:");
parseString("Ontology: Class: A");
parseString("Ontology: Class: A SubClassOf: id min 1 and id max 1");
parseString("Ontology: Class: A SubClassOf: id min 1 and id max 1 Class: B");
}
@Test
public void testExampleOntologies() {
File modelDir = new File(".." + File.separator
+ "org.emftext.language.owl" + File.separator + "models");
assertTrue(modelDir.exists());
File[] exampleFiles = modelDir.listFiles(new FileFilter() {
public boolean accept(File file) {
String filename = file.getName();
boolean isOwlFile = filename.endsWith("."
+ new OwlMetaInformation().getSyntaxName());
return isOwlFile && !"ill.owl".equals(filename)
&& !"large_galen.owl".equals(filename);
}
});
for (File exampleFile : exampleFiles) {
System.out.println("Parsing " + exampleFile.getName());
try {
parse(new FileInputStream(exampleFile));
} catch (FileNotFoundException e) {
fail(e.getMessage());
}
}
}
private void parseString(String document) {
System.out.println("Parsing \"" + document + "\"");
InputStream inputStream = new ByteArrayInputStream(document.getBytes());
parse(inputStream);
}
private void parse(InputStream inputString) {
ResourceSet rs = new ResourceSetImpl();
Resource r = rs.createResource(URI.createURI("temp.owl"));
assertNotNull(r);
try {
r.load(inputString, null);
} catch (IOException e) {
e.printStackTrace();
fail(e.getMessage());
}
List<Diagnostic> errors = r.getErrors();
for (Diagnostic error : errors) {
System.out.println("Error " + error.getMessage() + " at "
+ error.getLine() + "," + error.getColumn());
}
List<EObject> contents = r.getContents();
assertTrue(contents.size() > 0);
}
private void registerResourceFactories() {
Resource.Factory.Registry.INSTANCE
.getExtensionToFactoryMap()
.put(
"owl",
new org.emftext.language.owl.resource.owl.mopp.OwlResourceFactory());
}
}
<file_sep>/Extensions/org.emftext.language.swrl/metamodel/swrl.cs
SYNTAXDEF swrl
FOR <http://www.emftext.org/language/swrl>
START SWRLDocument
//IMPORTS {
// owl : <http://org.emftext/owl.ecore> <../../org.emftext.language.owl/metamodel/owl.genmodel>
// WITH SYNTAX owl <../../org.emftext.language.owl/metamodel/owl.cs>
//}
OPTIONS {
licenceHeader ="../../org.dropsbox/licence.txt";
usePredefinedTokens = "false";
disableLaunchSupport = "true";
disableDebugSupport = "true";
}
TOKENS {
DEFINE COMMENTS $'//'(~('\n'|'\r'))*$;
DEFINE WHITESPACE $(' '|'\t'|'\f')$;
DEFINE LINEBREAKS $('\r\n'|'\r'|'\n')$;
// copied from OWL syntax
DEFINE IRI $(('<')(~('>')|('\\''>'))*('>'))|(('A'..'Z' | ':' | 'a'..'z' | '0'..'9' | '_' | '-' )+)$;
}
TOKENSTYLES {
"COMMENTS" COLOR #008000;
}
RULES {
SWRLDocument ::=
("import" imports[IRI])+
rules+;
Rule ::= antecedent "=>" consequent;
Antecedent ::= body ("and" body)*;
Consequent ::= body ("and" body)*;
DescriptionAtom ::= description "(" object ")";
PropertyAtom ::= property[IRI] "(" source "," target ")";
//DataRangeAtom ::= dataRange[IRI] "(" object ")";
DifferentFromAtom ::= "differentFrom" "(" objectA "," objectB ")";
SameAsAtom ::= "sameAs" "(" objectA "," objectB ")";
// TODO DVariable ::= uri[];
DLiteral ::= literal;
IVariable ::= "?" iri[IRI];
DVariable ::= "?" iri[IRI];
// adapted from OWL syntax
ClassAtomic ::= not["not" : ""] clazz[IRI];
//NestedDescription ::= not["not" : ""] description : ClassAtomic;
BooleanLiteral ::= value["true":"false"];
}<file_sep>/Extensions/org.emftext.ontomopp.modelsync.test/src/org/emftext/ontomopp/modelsync/test/OWLTestHelper.java
/*******************************************************************************
* Copyright (c) 2006-2012
* Software Technology Group, Dresden University of Technology
* DevBoost GmbH, Berlin, Amtsgericht Charlottenburg, HRB 140026
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Software Technology Group - TU Dresden, Germany;
* DevBoost GmbH - Berlin, Germany
* - initial API and implementation
******************************************************************************/
package org.emftext.ontomopp.modelsync.test;
import static org.junit.Assert.fail;
import java.io.File;
import java.util.Set;
import org.eclipse.emf.common.util.URI;
import org.mindswap.pellet.PelletOptions;
import org.mindswap.pellet.exceptions.InconsistentOntologyException;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyCreationException;
import org.semanticweb.owlapi.model.OWLOntologyManager;
import com.clarkparsia.owlapi.explanation.PelletExplanation;
import com.clarkparsia.pellet.owlapiv3.PelletReasoner;
public class OWLTestHelper {
public OWLOntology loadOntology(OWLOntologyManager manager, URI uri) {
String fileString = uri.toFileString();
IRI iri = IRI.create(new File(fileString));
try {
OWLOntology ontology = manager.loadOntologyFromOntologyDocument(iri);
return ontology;
} catch (OWLOntologyCreationException e) {
e.printStackTrace();
fail(e.getMessage());
return null;
}
}
public PelletReasoner createReasoner(OWLOntology ontology) {
PelletExplanation.setup();
PelletOptions.USE_UNIQUE_NAME_ASSUMPTION = true;
PelletReasoner reasoner = com.clarkparsia.pellet.owlapiv3.PelletReasonerFactory.getInstance().createReasoner(ontology);
try {
reasoner.getKB().realize();
} catch (InconsistentOntologyException e) {
PelletExplanation explanation = new PelletExplanation(ontology);
Set<OWLAxiom> axioms = explanation.getInconsistencyExplanation();
for (OWLAxiom owlAxiom : axioms) {
System.out.println("Axiom involved in inconsistency: " + owlAxiom);
}
fail(e.getMessage());
}
return reasoner;
}
}
<file_sep>/OWLText Languages/org.owltext.language.requirements/metamodel/requirements.text.cs
SYNTAXDEF requirements
FOR <http://www.emftext.org/requirements>
START RequirementsSpec
OPTIONS {
usePredefinedTokens = "false";
overrideBuilder = "false";
disableLaunchSupport = "true";
disableDebugSupport = "true";
}
TOKENS {
DEFINE STRING_LITERAL $'"'('\\'('b'|'t'|'n'|'f'|'r'|'\"'|'\''|'\\')|('\\''u'('0'..'9'|'a'..'f'|'A'..'F')('0'..'9'|'a'..'f'|'A'..'F')('0'..'9'|'a'..'f'|'A'..'F')('0'..'9'|'a'..'f'|'A'..'F'))|'\\'('0'..'7')|~('\\'|'"'))*'"'$;
DEFINE WHITESPACE $(' '|'\t'|'\f')$;
DEFINE LINEBREAKS $('\r\n'|'\r'|'\n')$;
DEFINE TEXT $('A'..'Z' | 'a'..'z' | '0'..'9' | '_' | '-' | '::')+$;
DEFINE SL_COMMENT $'//'(~('\n'|'\r'|'\uffff'))* $ ;
DEFINE ML_COMMENT $'/*'.*'*/'$ ;
}
TOKENSTYLES {
"STRING_LITERAL" COLOR #2A00FF;
"TEXT" COLOR #000000;
// comments
"SL_COMMENT" COLOR #008000;
"ML_COMMENT" COLOR #008000;
"Requirements" COLOR #7F0055, BOLD;
"Documentation" COLOR #7F0055, BOLD;
"customer" COLOR #7F0055, BOLD;
"Categories" COLOR #7F0055, BOLD;
"Components" COLOR #7F0055, BOLD;
"component" COLOR #7F0055, BOLD;
"related to" COLOR #7F0055, BOLD;
"import" COLOR #7F0055, BOLD;
"priority" COLOR #7F0055, BOLD;
"+" COLOR #798f02, BOLD;
"++" COLOR #e08400, BOLD;
"+++" COLOR #ec3a0a, BOLD;
}
RULES {
RequirementsSpec ::= "Requirements" "Documentation"
name[STRING_LITERAL]
("import" imports['<','>'] ";")*
("customer" customer[STRING_LITERAL] )?
description['(',')']?
("Categories" "{"
categories+
"}")?
("Components" "{"
components+
"}")?
("Requirements" "{"
requirements+
"}")?;
Requirement ::= id[] ":" name[] ( "[" category[] ("," category[])* "]")?
priority[
low : "+",
medium : "++",
high : "+++"
] "priority"
description['(', ')']
("component" realisingComponent[])?
("related to" "[" related[] ("," related[])* "]")?;
Category ::= name[] description[STRING_LITERAL];
Component ::= name[] description[STRING_LITERAL];
}<file_sep>/Extensions/org.emftext.language.sparql/metamodel/sparql.cs
//*******************************************************************************
// Copyright (c) 2006-2010
// Software Technology Group, Dresden University of Technology
//
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// which accompanies this distribution, and is available at
// http://www.eclipse.org/legal/epl-v10.html
//
// Contributors:
// Software Technology Group - TU Dresden, Germany
// - initial API and implementation
// ******************************************************************************/
SYNTAXDEF rq
FOR <http://www.emftext.org/sparql>
START SparqlQueries
IMPORTS{
}
OPTIONS {
licenceHeader ="../../org.dropsbox/licence.txt";
tokenspace="1";
defaultTokenName = "IDENTIFIER";
usePredefinedTokens="false";
generateCodeFromGeneratorModel="true";
reloadGeneratorModel="false";
resourcePluginID = "org.emftext.language.sparql.resource.sparql";
basePackage = "org.emftext.language.sparql.resource.sparql";
resourceUIPluginID = "org.emftext.language.sparql.resource.sparql.ui";
uiBasePackage = "org.emftext.language.sparql.resource.sparql.ui";
disableLaunchSupport = "true";
disableDebugSupport = "true";
}
TOKENS{
DEFINE DEF_IRI_REF $'<' (~( '^' | '<' | '>' | '"' | '{' | '}' | '`' | '\\' | '\u0000'..'\u0020' ))* '>'$;
DEFINE DEF_LANGTAG $ '@' ('A'..'Z' | 'a'..'z')+ ('-' ('A'..'Z' | 'a'..'z' | '0'..'9')+)* $;
DEFINE DEF_INTEGER $('+'|'-')? ('0'..'9')+$;
DEFINE DEF_DECIMAL $('+'|'-')? ((('0'..'9')+ '.' ('0'..'9')* )| ('.' ('0'..'9')+ ))$;
DEFINE DEF_DOUBLE $('+'|'-')? ((('0'..'9')+ '.' ('0'..'9')*) | ('.' ('0'..'9')+) | (('0'..'9')+ )) ('e'|'E') ('+'|'-')? ('0'..'9')+ $;
//folgende 3 Definition sind in VARNAME, PN_PREFIX und PN_LOCAL eingearbeitet
// PN_CHARS_BASE $ ('A'..'Z' | 'a'..'z' | '\u00C0'..'\u00D6'| '\u00D8'..'\u00F6' | '\u00F8'..'\u02FF' | '\u0370'..'\u037D' | '\u037F'..'\u1FFF' | '\u200C'..'\u200D' | '\u2070'..'\u218F' | '\u2C00'..'\u2FEF' | '\u3001'..'\uD7FF' | '\uF900'..'\uFDCF' | '\uFDF0'..'\uFFFD' | '\u10000'..'\uEFFFF') $;
// PN_CHARS_U $ ('A'..'Z' | 'a'..'z' | '\u00C0'..'\u00D6'| '\u00D8'..'\u00F6' | '\u00F8'..'\u02FF' | '\u0370'..'\u037D' | '\u037F'..'\u1FFF' | '\u200C'..'\u200D' | '\u2070'..'\u218F' | '\u2C00'..'\u2FEF' | '\u3001'..'\uD7FF' | '\uF900'..'\uFDCF' | '\uFDF0'..'\uFFFD' | '\u10000'..'\uEFFFF' | '_' ) $;
// PN_CHARS $ ('A'..'Z' | 'a'..'z' | '\u00C0'..'\u00D6'| '\u00D8'..'\u00F6' | '\u00F8'..'\u02FF' | '\u0370'..'\u037D' | '\u037F'..'\u1FFF' | '\u200C'..'\u200D' | '\u2070'..'\u218F' | '\u2C00'..'\u2FEF' | '\u3001'..'\uD7FF' | '\uF900'..'\uFDCF' | '\uFDF0'..'\uFFFD' | '\u10000'..'\uEFFFF' | '_' | '.' | '0'..'9' | '\u00B7' | '\u0300'..'\u036F' | '\u203F'..'\u2040') $;
DEFINE VARNAME $ ('?'|'\u0024') ('A'..'Z' | 'a'..'z' | '\u00C0'..'\u00D6' | '\u00D8'..'\u00F6' | '\u00F8'..'\u02FF' | '\u0370'..'\u037D' | '\u037F'..'\u1FFF' | '\u200C'..'\u200D' | '\u2070'..'\u218F' | '\u2C00'..'\u2FEF' | '\u3001'..'\uD7FF' | '\uF900'..'\uFDCF' | '\uFDF0'..'\uFFFD' | '_' | '0'..'9' ) ('A'..'Z' | 'a'..'z' | '\u00C0'..'\u00D6'| '\u00D8'..'\u00F6' | '\u00F8'..'\u02FF' | '\u0370'..'\u037D' | '\u037F'..'\u1FFF' | '\u200C'..'\u200D' | '\u2070'..'\u218F' | '\u2C00'..'\u2FEF' | '\u3001'..'\uD7FF' | '\uF900'..'\uFDCF' | '\uFDF0'..'\uFFFD' | '_' | '0'..'9' | '\u00B7' | '\u0300'..'\u036F' | '\u203F'..'\u2040' )* $; // | '\u10000'..'\uEFFFF' fehlt
DEFINE DEF_PNAME_NS $ (('A'..'Z' | 'a'..'z' | '\u00C0'..'\u00D6' | '\u00D8'..'\u00F6' | '\u00F8'..'\u02FF' | '\u0370'..'\u037D' | '\u037F'..'\u1FFF' | '\u200C'..'\u200D' | '\u2070'..'\u218F' | '\u2C00'..'\u2FEF' | '\u3001'..'\uD7FF' | '\uF900'..'\uFDCF' | '\uFDF0'..'\uFFFD') (('A'..'Z' | 'a'..'z' | '\u00C0'..'\u00D6'| '\u00D8'..'\u00F6' | '\u00F8'..'\u02FF' | '\u0370'..'\u037D' | '\u037F'..'\u1FFF' | '\u200C'..'\u200D' | '\u2070'..'\u218F' | '\u2C00'..'\u2FEF' | '\u3001'..'\uD7FF' | '\uF900'..'\uFDCF' | '\uFDF0'..'\uFFFD' | '_' | '0'..'9' | '\u00B7' | '\u0300'..'\u036F' | '\u203F'..'\u2040' | '.')* ('A'..'Z' | 'a'..'z' | '\u00C0'..'\u00D6'| '\u00D8'..'\u00F6' | '\u00F8'..'\u02FF' | '\u0370'..'\u037D' | '\u037F'..'\u1FFF' | '\u200C'..'\u200D' | '\u2070'..'\u218F' | '\u2C00'..'\u2FEF' | '\u3001'..'\uD7FF' | '\uF900'..'\uFDCF' | '\uFDF0'..'\uFFFD' | '_' | '.' | '0'..'9' | '\u00B7' | '\u0300'..'\u036F' | '\u203F'..'\u2040'))?)? ':' $; // | '\u10000'..'\uEFFFF' fehlt
DEFINE DEF_A $'a'$;
DEFINE DEF_PN_LOCAL $ (('A'..'Z' | 'a'..'z' | '\u00C0'..'\u00D6' | '\u00D8'..'\u00F6' | '\u00F8'..'\u02FF' | '\u0370'..'\u037D' | '\u037F'..'\u1FFF' | '\u200C'..'\u200D' | '\u2070'..'\u218F' | '\u2C00'..'\u2FEF' | '\u3001'..'\uD7FF' | '\uF900'..'\uFDCF' | '\uFDF0'..'\uFFFD') (('A'..'Z' | 'a'..'z' | '\u00C0'..'\u00D6'| '\u00D8'..'\u00F6' | '\u00F8'..'\u02FF' | '\u0370'..'\u037D' | '\u037F'..'\u1FFF' | '\u200C'..'\u200D' | '\u2070'..'\u218F' | '\u2C00'..'\u2FEF' | '\u3001'..'\uD7FF' | '\uF900'..'\uFDCF' | '\uFDF0'..'\uFFFD' | '_' | '0'..'9' | '\u00B7' | '\u0300'..'\u036F' | '\u203F'..'\u2040' | '.')* ('A'..'Z' | 'a'..'z' | '\u00C0'..'\u00D6'| '\u00D8'..'\u00F6' | '\u00F8'..'\u02FF' | '\u0370'..'\u037D' | '\u037F'..'\u1FFF' | '\u200C'..'\u200D' | '\u2070'..'\u218F' | '\u2C00'..'\u2FEF' | '\u3001'..'\uD7FF' | '\uF900'..'\uFDCF' | '\uFDF0'..'\uFFFD' | '_' | '.' | '0'..'9' | '\u00B7' | '\u0300'..'\u036F' | '\u203F'..'\u2040'))?)? ':'
('A'..'Z' | 'a'..'z' | '\u00C0'..'\u00D6' | '\u00D8'..'\u00F6' | '\u00F8'..'\u02FF' | '\u0370'..'\u037D' | '\u037F'..'\u1FFF' | '\u200C'..'\u200D' | '\u2070'..'\u218F' | '\u2C00'..'\u2FEF' | '\u3001'..'\uD7FF' | '\uF900'..'\uFDCF' | '\uFDF0'..'\uFFFD' | '_' | '0'..'9' ) (('A'..'Z' | 'a'..'z' | '\u00C0'..'\u00D6'| '\u00D8'..'\u00F6' | '\u00F8'..'\u02FF' | '\u0370'..'\u037D' | '\u037F'..'\u1FFF' | '\u200C'..'\u200D' | '\u2070'..'\u218F' | '\u2C00'..'\u2FEF' | '\u3001'..'\uD7FF' | '\uF900'..'\uFDCF' | '\uFDF0'..'\uFFFD' | '_' | '0'..'9' | '\u00B7' | '\u0300'..'\u036F' | '\u203F'..'\u2040' | '.' )* ('A'..'Z' | 'a'..'z' | '\u00C0'..'\u00D6'| '\u00D8'..'\u00F6' | '\u00F8'..'\u02FF' | '\u0370'..'\u037D' | '\u037F'..'\u1FFF' | '\u200C'..'\u200D' | '\u2070'..'\u218F' | '\u2C00'..'\u2FEF' | '\u3001'..'\uD7FF' | '\uF900'..'\uFDCF' | '\uFDF0'..'\uFFFD' | '_' | '.' | '0'..'9' | '\u00B7' | '\u0300'..'\u036F' | '\u203F'..'\u2040'))? $ ; // | '\u10000'..'\uEFFFF' fehlt
DEFINE DEF_PN_LOCAL_BLANK $ '_:'('A'..'Z' | 'a'..'z' | '\u00C0'..'\u00D6' | '\u00D8'..'\u00F6' | '\u00F8'..'\u02FF' | '\u0370'..'\u037D' | '\u037F'..'\u1FFF' | '\u200C'..'\u200D' | '\u2070'..'\u218F' | '\u2C00'..'\u2FEF' | '\u3001'..'\uD7FF' | '\uF900'..'\uFDCF' | '\uFDF0'..'\uFFFD' | '_' | '0'..'9' ) (('A'..'Z' | 'a'..'z' | '\u00C0'..'\u00D6'| '\u00D8'..'\u00F6' | '\u00F8'..'\u02FF' | '\u0370'..'\u037D' | '\u037F'..'\u1FFF' | '\u200C'..'\u200D' | '\u2070'..'\u218F' | '\u2C00'..'\u2FEF' | '\u3001'..'\uD7FF' | '\uF900'..'\uFDCF' | '\uFDF0'..'\uFFFD' | '_' | '0'..'9' | '\u00B7' | '\u0300'..'\u036F' | '\u203F'..'\u2040' | '.' )* ('A'..'Z' | 'a'..'z' | '\u00C0'..'\u00D6'| '\u00D8'..'\u00F6' | '\u00F8'..'\u02FF' | '\u0370'..'\u037D' | '\u037F'..'\u1FFF' | '\u200C'..'\u200D' | '\u2070'..'\u218F' | '\u2C00'..'\u2FEF' | '\u3001'..'\uD7FF' | '\uF900'..'\uFDCF' | '\uFDF0'..'\uFFFD' | '_' | '.' | '0'..'9' | '\u00B7' | '\u0300'..'\u036F' | '\u203F'..'\u2040'))? $ ; // | '\u10000'..'\uEFFFF' fehlt
DEFINE DEF_STRING_LITERAL_LONG1 $ '\'\'\'' ( ( '\'' | '\'\'' )? ( ~( '\'' | '\\' ) | ('\\'('t'|'b'|'n'|'r'|'f'|'\\'|'\"'|'\'')) ) )* '\'\'\'' $;
DEFINE DEF_STRING_LITERAL_LONG2 $ '"""' ( ( '"' | '""' )? ( ~( '"' | '\\' ) | ('\\'('t'|'b'|'n'|'r'|'f'|'\\'|'\"'|'\'')) ) )* '"""' $;
DEFINE DEF_STRING_LITERAL1 $ '\'' ( ~( '\u0027' | '\u005C' | '\u000A' | '\u000D' ) | ('\\'('t'|'b'|'n'|'r'|'f'|'\\'|'\"'|'\'')) )* '\''$;
DEFINE DEF_STRING_LITERAL2 $ '"' ( ~( '\u0022' | '\u005C' | '\u000A' | '\u000D' ) | ('\\'('t'|'b'|'n'|'r'|'f'|'\\'|'\"'|'\'')) )* '"'$;
DEFINE COMMENT $'#'(~('\n'|'\r'))*$;
//DEFINE DEF_WS $( '\u0020' | '\u0009' | '\u000D' | '\u000A')$; //=WITHSPACE, überflüssig?
DEFINE WHITESPACE $(' '|'\t'|'\f')$;
DEFINE LINEBREAKS $('\r\n'|'\r'|'\n')$;
}
RULES{
SparqlQueries ::= prologue !0 query+;
Prologue ::= basedeclaration? prefixdeclaration* ;
BaseDecl ::= "BASE" iriref !0 ;
PrefixDecl ::= "PREFIX" pnamens iriref !0 ;
SelectQuery ::= "SELECT" solutionsdisplay? ( var+ | "*" ) datasetclause* !0 whereclause solutionmodifier;
ConstructQuery ::= "CONSTRUCT" constructtemplate datasetclause* !0 whereclause solutionmodifier ;
DescribeQuery ::= "DESCRIBE" ( varoririref+ | "*" ) datasetclause* !0 whereclause? solutionmodifier ;
AskQuery ::= "ASK" datasetclause* !0 whereclause ;
DistinctNE ::= "DISTINCT";
ReducedNE ::= "REDUCED" ;
DatasetClause ::= "FROM" graphclause ;
DefaultGraphClause ::= sourceselector ;
NamedGraphClause ::= "NAMED" sourceselector ;
WhereClause ::= where? groupgraphpattern;
SolutionModifier ::= orderclause? limitoffsetclauses? ;
OrderClause ::= "ORDER" "BY" ordercondition+ ;
OrderConditionLeftNE ::= ascOrDecs brackettedexpression ;
LimitOffsetClausesLeftNE ::= limitclause offsetclause? ;
LimitOffsetClausesRightNE ::= offsetclause limitclause? ;
LimitClause ::= "LIMIT" integer ;
OffsetClause ::= "OFFSET" integer ;
GroupGraphPattern ::= "{" !1 triplesblock? additionalGGPelements* !0 "}" ;
AdditionalGGPElement ::= patternOrFilterNE (".")? triplesblock? ;
TriplesBlock ::= triplessamesubject ( "." !0 triplessamesubject? )* ;
OptionalGraphPattern ::= "OPTIONAL" groupgraphpattern ;
GraphGraphPattern ::= "GRAPH" varoririref groupgraphpattern;
GroupOrUnionGraphPattern ::= groupgraphpattern ( "UNION" groupgraphpattern )* ;
Filter ::= "FILTER" constraint ;
FunctionCall ::= iriref arglist ;
ArgListNILNE ::= nil ;
ArgListExpressionNE ::= "(" expression ( "," addexpression )* ")" ;
ConstructTemplate ::= "{" (constructtriples ( "." constructtriples? )* )? "}" ;
TriplesSameSubjectLeftNE ::= varorterm propertylistnotempty ;
TriplesSameSubjectRightNE ::= triplesnode propertylistnotempty?;
PropertyListNotEmpty ::= verb objectlist ( ";" !0 ( verb objectlist )? )* ;
ObjectList ::= object ( "," object )* ;
Object ::= graphnode ; //abstract möglich?
BlankNodePropertyList ::= "[" propertylistnotempty "]" ;
Collection ::= "(" graphnode+ ")" ;
Expression ::= conditionalorexpression ;
AdditionalExpressionNE ::= conditionalorexpression ;
ConditionalOrExpression ::= conditionalandexpression addconditionalandexpression* ;
AdditionalConditionalAndExpressionNE ::= "||" conditionalandexpression ;
ConditionalAndExpression ::= valuelogical addvaluelogical* ;
ValueLogical ::= relationalexpression ;
AdditionalValueLogicalNE ::= "&&" relationalexpression ;
RelationalExpression ::= numericexpression addnumericexpression?;
EqualsNumericExpressionNE ::= "=" additiveexpression ;
NotEqualNumericExpressionNE ::= "!=" additiveexpression ;
SmallerNumericExpressionNE ::= "<" additiveexpression ;
BiggerNumericExpressionNE ::= ">" additiveexpression ;
SmallerOrEqualNumericExpressionNE ::= "<=" additiveexpression ;
BiggerOrEqualNumericExpressionNE ::= ">=" additiveexpression ;
NumericExpression ::= additiveexpression ;
AdditiveExpression ::= multiplicativeexpression addmultiplicativeexpression *;
PlusMultiplicativeExpressionNE ::= "+" multiplicativeexpression ;
MinusMultiplicativeExpressionNE ::= "-" multiplicativeexpression ;
MultiplicativeExpression ::= unaryexpression addunaryexpression * ;
TimesAdditionalUnaryExpressionNE ::= "*" unaryexpression ;
DividedByAdditionalUnaryExpressionNE ::= "/" unaryexpression ;
NotPrimaryExpressionNE ::= "!" primaryexpression;
PlusPrimaryExpressionNE ::= "+" primaryexpression;
MinusPrimaryExpressionNE ::= "-" primaryexpression;
BrackettedExpression ::= "(" expression ")" ;
StrBuiltInCallNE ::= ("str" | "STR") "(" expression ")" ;
LangBuiltInCallNE ::= ("lang" | "LANG") "(" expression ")" ;
LangmatchesBuiltInCallNE ::= ("langMatches" | "LANGMATCHES") "(" expression "," addexpression ")" ;
DatatypeBuiltInCallNE ::= ("datatype" | "DATATYPE") "(" expression ")" ;
BoundBuiltInCallNE ::= ("bound" | "BOUND") "(" var ")" ;
SameTermBuiltInCallNE ::= "sameTerm" "(" expression "," addexpression ")" ;
IsIRIBuiltInCallNE ::= "isIRI" "(" expression ")" ;
IsURIBuiltInCallNE ::= "isURI" "(" expression ")" ;
IsBlankBuiltInCallNE ::= ("isBlank" | "isBLANK") "(" expression ")" ;
IsLiteralBuiltInCallNE ::= ("isLiteral" | "isLITERAL") "(" expression ")" ;
RegexExpression ::= ("regex"|"REGEX") "(" expression "," addexpression ( "," addexpression )? ")" ;
IRIrefOrFunction ::= iriref arglist? ;
RDFLiteral ::= string langtagoririrefNE? ;
UpIRIrefNE ::= "^^" iriref ;
TrueBooleanLiteralNE ::= "true";
FalseBooleanLiteralNE ::= "false";
IRI_REF ::= iri_ref[DEF_IRI_REF] ;
//WS ::= ws[DEF_WS] ; //ws=WHITESPACE/darum entfernt
PNAME_LN ::= pn_local ;
PNAME_NS ::= pn_prefix[DEF_PNAME_NS] ;
Var ::= varname[VARNAME] ;
PN_LOCAL ::= pn_local[DEF_PN_LOCAL];
BLANK_NODE_LABEL ::= pn_local[DEF_PN_LOCAL_BLANK];
LANGTAG ::= langtag[DEF_LANGTAG];
INTEGER ::= integer[DEF_INTEGER];
DECIMAL ::= decimal[DEF_DECIMAL];
DOUBLE ::= double[DEF_DOUBLE];
STRING_LITERAL_LONG1 ::= string[DEF_STRING_LITERAL_LONG1];
STRING_LITERAL_LONG2 ::= string[DEF_STRING_LITERAL_LONG2];
STRING_LITERAL1 ::= string[DEF_STRING_LITERAL1];
STRING_LITERAL2 ::= string[DEF_STRING_LITERAL2];
NotInList ::= "(" ")"; //ws=WHITESPACE, darum entfernt
ANON ::= "[" "]"; //ws=WHITESPACE, darum entfernt
WhereLiteral ::= "WHERE";
AscendingLiteral ::= "ASC";
DescendingLiteral ::= "DESC";
VerbANE ::= "a";
}<file_sep>/OWLText Languages/org.owltext.language.petrinets/metamodel/petrinets.cs
// EMFText concrete syntax definition for the Petri net language.
SYNTAXDEF petrinets
// Metamodel this syntax is bound to.
FOR <http://www.emftext.org/language/petrinets>
START PetriNet
OPTIONS {
// Keep the generated default builder; do not replace it with a custom one.
overrideBuilder = "false";
}
TOKENS {
// Single-line comments: '//' up to the end of the line.
DEFINE SL_COMMENT $'//'(~('\n'|'\r'|'\uffff'))* $;
// Multi-line comments: '/*' ... '*/'.
DEFINE ML_COMMENT $'/*'.*'*/'$;
}
TOKENSTYLES {
"ML_COMMENT" COLOR #008000, ITALIC;
"SL_COMMENT" COLOR #000080, ITALIC;
}
RULES {
// A Petri net with an optional quoted name and any mix of components and arcs.
PetriNet ::= "petrinet" name['"','"']? "{" (components | arcs)* "}";
// Arcs connect quoted element names; the keyword variant fixes the arc kind.
Arc ::= in['"','"'] "->" out['"','"'];
ConsumingArc ::= in['"','"'] "-consume->" out['"','"'];
ProducingArc ::= in['"','"'] "-produce->" out['"','"'];
// Places and transitions carry an optional quoted name.
Place ::= "place" name['"','"']?;
Transition ::= "transition" name['"','"']?;
}
/*******************************************************************************
* Copyright (c) 2006-2012
* Software Technology Group, Dresden University of Technology
* DevBoost GmbH, Berlin, Amtsgericht Charlottenburg, HRB 140026
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Software Technology Group - TU Dresden, Germany;
* DevBoost GmbH - Berlin, Germany
* - initial API and implementation
******************************************************************************/
/**
* <copyright>
* </copyright>
*
*
*/
package org.emftext.language.swrl.resource.swrl.analysis;
import java.util.Map;
import org.eclipse.emf.ecore.EReference;
import org.emftext.language.owl.Feature;
import org.emftext.language.swrl.PropertyAtom;
import org.emftext.language.swrl.resource.swrl.ISwrlReferenceResolveResult;
import org.emftext.language.swrl.resource.swrl.ISwrlReferenceResolver;
/**
 * Resolves and de-resolves the property reference of a {@link PropertyAtom}
 * by delegating every call to the generic SWRL default resolver.
 */
public class PropertyAtomPropertyReferenceResolver implements ISwrlReferenceResolver<PropertyAtom, Feature> {

    /** Generic resolver that implements the actual lookup strategy. */
    private SwrlDefaultResolverDelegate<PropertyAtom, Feature> defaultResolver =
            new SwrlDefaultResolverDelegate<PropertyAtom, Feature>();

    public void resolve(String identifier, PropertyAtom container, EReference reference, int position, boolean resolveFuzzy, final ISwrlReferenceResolveResult<Feature> result) {
        defaultResolver.resolve(identifier, container, reference, position, resolveFuzzy, result);
    }

    public String deResolve(Feature element, PropertyAtom container, EReference reference) {
        return defaultResolver.deResolve(element, container, reference);
    }

    public void setOptions(Map<?, ?> options) {
        // Intentionally empty: this resolver has no configurable options.
    }
}
<file_sep>/Extensions/org.emftext.language.owlcl.resource.owlcl/src/org/emftext/language/owlcl/resource/owlcl/mopp/OwlclBuilder.java
/*******************************************************************************
* Copyright (c) 2006-2012
* Software Technology Group, Dresden University of Technology
* DevBoost GmbH, Berlin, Amtsgericht Charlottenburg, HRB 140026
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Software Technology Group - TU Dresden, Germany;
* DevBoost GmbH - Berlin, Germany
* - initial API and implementation
******************************************************************************/
package org.emftext.language.owlcl.resource.owlcl.mopp;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.common.util.URI;
import org.eclipse.emf.ecore.EAnnotation;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EClassifier;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.emf.ecore.EcoreFactory;
import org.emftext.language.owlcl.Constraint;
import org.emftext.language.owlcl.OWLCLSpec;
import org.emftext.language.owlcl.Type;
import org.emftext.language.owlcl.resource.owlcl.IOwlclBuilder;
import org.emftext.runtime.owltext.OWLTextEObjectPrinter;
import org.emftext.runtime.owltext.transformation.OWLTransformationConstants;
/**
 * Builder that weaves the constraints and type definitions of an OWLCL
 * specification into the constrained Ecore metamodel as EAnnotations and
 * saves the modified metamodel resource.
 */
public class OwlclBuilder implements IOwlclBuilder {

    /** Always rebuild; there is no incremental-build support. */
    public boolean isBuildingNeeded(URI uri) {
        return true;
    }

    /**
     * Weaves constraint and type-definition annotations into the metamodel
     * referenced by the OWLCL specification in the given resource, then
     * saves that metamodel. Resources with syntax errors are skipped, and
     * the skip is deliberately reported as OK so the build continues.
     */
    public IStatus build(OwlclResource resource, IProgressMonitor monitor) {
        if (resource.getErrors().size() > 0) {
            // Do not weave annotations from a broken specification.
            return Status.OK_STATUS;
        }
        if (resource.getContents().size() == 1) {
            OWLCLSpec spec = (OWLCLSpec) resource.getContents().get(0);
            // Remove annotations left over from a previous build run.
            cleanMetaclasses(spec.getConstrainedMetamodel());
            EList<Constraint> constraints = spec.getConstraints();
            for (Constraint c : constraints) {
                EClass constrainedMetaclass = c.getConstrainedMetaclass();
                EAnnotation anno = EcoreFactory.eINSTANCE.createEAnnotation();
                anno.setSource(OWLTransformationConstants.OWL_CONSTRAINT);
                String description = OWLTextEObjectPrinter
                        .getOWLRepresentation(c.getConstraintDescription());
                // Detail key: error message shown on violation; value: OWL text.
                anno.getDetails().put(c.getErrorMsg(), description);
                constrainedMetaclass.getEAnnotations().add(anno);
            }
            EList<Type> types = spec.getTypes();
            for (Type type : types) {
                EAnnotation anno = EcoreFactory.eINSTANCE.createEAnnotation();
                anno.setSource(OWLTransformationConstants.OWL_DEFINITION);
                String description = OWLTextEObjectPrinter
                        .getOWLRepresentation(type.getTypeDescription());
                anno.getDetails().put(type.getName(), description);
                // Carry over any additional details attached to the type itself.
                EList<EAnnotation> eAnnotations = type.getEAnnotations();
                for (EAnnotation eAnnotation : eAnnotations) {
                    anno.getDetails().addAll(eAnnotation.getDetails());
                }
                // NOTE(review): EAnnotations are containment children; adding the
                // same 'anno' to several supertypes MOVES it, so only the last
                // supertype keeps it. Confirm whether one annotation per
                // supertype is intended before changing this.
                EList<EClass> refinedTypes = type.getESuperTypes();
                for (EClass refinedType : refinedTypes) {
                    refinedType.getEAnnotations().add(anno);
                }
            }
            try {
                spec.getConstrainedMetamodel().eResource()
                        .save(Collections.EMPTY_MAP);
            } catch (IOException e) {
                // TODO report a proper error IStatus instead of a stack trace
                e.printStackTrace();
            }
        }
        return Status.OK_STATUS;
    }

    /**
     * Recursively removes all OWL definition/constraint annotations that a
     * previous build added to the classifiers of the given package and its
     * subpackages.
     */
    private void cleanMetaclasses(EPackage constrainedMetamodel) {
        EList<EPackage> eSubpackages = constrainedMetamodel.getESubpackages();
        for (EPackage subpackage : eSubpackages) {
            cleanMetaclasses(subpackage);
        }
        EList<EClassifier> classifiers = constrainedMetamodel.getEClassifiers();
        for (EClassifier eClassifier : classifiers) {
            List<EAnnotation> toRemove = new ArrayList<EAnnotation>();
            EList<EAnnotation> eAnnotations = eClassifier.getEAnnotations();
            for (EAnnotation eAnnotation : eAnnotations) {
                // Constant-first equals: EAnnotation.getSource() may be null,
                // which made the original source.equals(...) calls NPE-prone.
                String source = eAnnotation.getSource();
                if (OWLTransformationConstants.OWL_DEFINITION.equals(source)
                        || OWLTransformationConstants.OWL_CONSTRAINT.equals(source)) {
                    toRemove.add(eAnnotation);
                }
            }
            eClassifier.getEAnnotations().removeAll(toRemove);
        }
    }

    /** Nothing to clean up on deletion. */
    public IStatus handleDeletion(URI uri, IProgressMonitor monitor) {
        return Status.OK_STATUS;
    }
}
<file_sep>/Core/org.emftext.language.owl.resource.owl/build.properties
bin.includes = META-INF/,\
.,\
icons/,\
css/,\
plugin.xml,\
lib/owlapi/owlapi-bin.jar
source.. = src/,src-gen/
<file_sep>/Extensions/org.emftext.runtime.owltext.transformation/src/org/emftext/runtime/owltext/transformation/Ecore2Owl.java
/*******************************************************************************
* Copyright (c) 2006-2012
* Software Technology Group, Dresden University of Technology
* DevBoost GmbH, Berlin, Amtsgericht Charlottenburg, HRB 140026
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Software Technology Group - TU Dresden, Germany;
* DevBoost GmbH - Berlin, Germany
* - initial API and implementation
******************************************************************************/
package org.emftext.runtime.owltext.transformation;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.common.util.EMap;
import org.eclipse.emf.common.util.TreeIterator;
import org.eclipse.emf.common.util.URI;
import org.eclipse.emf.ecore.EAnnotation;
import org.eclipse.emf.ecore.EAttribute;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EClassifier;
import org.eclipse.emf.ecore.EDataType;
import org.eclipse.emf.ecore.EEnum;
import org.eclipse.emf.ecore.EEnumLiteral;
import org.eclipse.emf.ecore.ENamedElement;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.emf.ecore.EReference;
import org.eclipse.emf.ecore.EStructuralFeature;
import org.eclipse.emf.ecore.EcorePackage;
import org.eclipse.emf.ecore.resource.Resource;
import org.eclipse.emf.ecore.resource.ResourceSet;
import org.eclipse.emf.ecore.resource.impl.ResourceSetImpl;
import org.emftext.language.owl.AbbreviatedXSDStringLiteral;
import org.emftext.language.owl.Annotation;
import org.emftext.language.owl.AnnotationProperty;
import org.emftext.language.owl.AnnotationValue;
import org.emftext.language.owl.Class;
import org.emftext.language.owl.ClassAtomic;
import org.emftext.language.owl.Conjunction;
import org.emftext.language.owl.DataProperty;
import org.emftext.language.owl.DataPropertyFact;
import org.emftext.language.owl.Datatype;
import org.emftext.language.owl.DatatypeReference;
import org.emftext.language.owl.Description;
import org.emftext.language.owl.DisjointClasses;
import org.emftext.language.owl.Feature;
import org.emftext.language.owl.FeatureReference;
import org.emftext.language.owl.FeatureRestriction;
import org.emftext.language.owl.Frame;
import org.emftext.language.owl.Individual;
import org.emftext.language.owl.LiteralTarget;
import org.emftext.language.owl.Namespace;
import org.emftext.language.owl.NestedDescription;
import org.emftext.language.owl.ObjectProperty;
import org.emftext.language.owl.ObjectPropertyFact;
import org.emftext.language.owl.ObjectPropertyMax;
import org.emftext.language.owl.ObjectPropertyMin;
import org.emftext.language.owl.ObjectPropertyReference;
import org.emftext.language.owl.Ontology;
import org.emftext.language.owl.OntologyDocument;
import org.emftext.language.owl.OwlFactory;
import org.emftext.language.owl.reasoning.EMFTextPelletReasoner;
import org.emftext.language.owl.resource.owl.analysis.custom.CrossResourceIRIResolver;
public class Ecore2Owl {
// Creates a new, stateful transformation instance; state accumulates in
// the maps below, so one instance should serve one transformation run.
public Ecore2Owl() {
super();
}
// Factory used to create all OWL model elements.
private OwlFactory owlFactory = OwlFactory.eINSTANCE;
// The ontology currently being populated.
private Ontology ontology;
// Maps Ecore named elements (classes, enums, datatypes) to their OWL frames.
private HashMap<ENamedElement, Frame> eType2owlClass = new LinkedHashMap<ENamedElement, Frame>();
// Maps Ecore structural features to the OWL object/data properties created for them.
private HashMap<EStructuralFeature, Feature> references2objectProperties = new LinkedHashMap<EStructuralFeature, Feature>();
// NOTE(review): appears unused in this region of the file - confirm before removing.
private int constraintCounter = 0;
// Transitive supertype index: class -> all its supertypes.
private HashMap<EClass, List<EClass>> allSupertypes = new LinkedHashMap<EClass, List<EClass>>();
// Transitive subtype index: class -> all its subtypes.
private HashMap<EClass, List<EClass>> allSubtypes = new LinkedHashMap<EClass, List<EClass>>();
// Direct (non-transitive) subtype index.
private HashMap<EClass, List<EClass>> directSubtypes = new LinkedHashMap<EClass, List<EClass>>();
// The metamodel currently being transformed.
private EPackage currentMetamodel;
// Caches the type mappings produced for imported (foreign) metamodels.
private HashMap<EPackage, HashMap<ENamedElement, Frame>> importedTypeMaps = new LinkedHashMap<EPackage, HashMap<ENamedElement, Frame>>();
// Where the resulting ontology is saved; null disables saving.
private URI targetURI;
/**
 * Records 'subtype' as a (transitive) subtype of 'key' in the allSubtypes
 * index, creating the bucket list on first use.
 */
private void addSubtype(EClass key, EClass subtype) {
    List<EClass> bucket = this.allSubtypes.get(key);
    if (bucket != null) {
        bucket.add(subtype);
        return;
    }
    bucket = new ArrayList<EClass>();
    bucket.add(subtype);
    this.allSubtypes.put(key, bucket);
}

/**
 * Records 'subtype' as a direct subtype of 'directSupertype' in the
 * directSubtypes index, creating the bucket list on first use.
 */
private void addDirectSubtype(EClass directSupertype, EClass subtype) {
    List<EClass> bucket = this.directSubtypes.get(directSupertype);
    if (bucket != null) {
        bucket.add(subtype);
        return;
    }
    bucket = new ArrayList<EClass>();
    bucket.add(subtype);
    this.directSubtypes.put(directSupertype, bucket);
}
/** Appends all given supertypes to the bucket for 'key' in the allSupertypes index. */
private void addSupertypes(EClass key, List<EClass> supertypes) {
    List<EClass> known = this.allSupertypes.get(key);
    if (known == null) {
        known = new ArrayList<EClass>();
        this.allSupertypes.put(key, known);
    }
    known.addAll(supertypes);
}

/** Registers the OWL frame created for the given Ecore element. */
private void addTypeMapping(ENamedElement type, Frame owlClass) {
    this.eType2owlClass.put(type, owlClass);
}
/**
 * Looks up the OWL frame created for the given Ecore element. Elements
 * from a foreign (imported) metamodel trigger an on-demand transformation
 * of that metamodel, whose mappings are then cached. May return null when
 * no mapping exists.
 */
private Frame getTypeMapping(ENamedElement type) {
Frame frame = eType2owlClass.get(type);
if (frame == null) {
// Not from the current metamodel: locate the element's root package.
// NOTE(review): assumes the root EPackage is the first resource content - confirm.
EPackage eContainer = (EPackage) type.eResource().getContents()
.get(0);
if (eContainer != currentMetamodel) {
HashMap<ENamedElement, Frame> importedMap = this.importedTypeMaps
.get(eContainer);
if (importedMap == null) {
// First reference into this foreign metamodel: import and transform it.
importedMap = addMetamodelImport(eContainer);
this.importedTypeMaps.put(eContainer, importedMap);
}
frame = importedMap.get(type);
}
}
return frame;
}
/**
 * Creates OWL datatype frames for all Ecore EDataTypes and registers them
 * in the type mapping. Java instance class names are mapped to known type
 * names via OWLTransformationHelper; otherwise the EDataType name is used
 * as the IRI.
 */
private void initDatatypes() {
EList<EClassifier> eClassifiers = EcorePackage.eINSTANCE
.getEClassifiers();
for (EClassifier eclassifier : eClassifiers) {
if (eclassifier instanceof EDataType) {
EDataType primitive = (EDataType) eclassifier;
Datatype property = owlFactory.createDatatype();
String typeName = OWLTransformationHelper.getDatatypeMap().get(
primitive.getInstanceClassName());
// No mapped name known: fall back to the Ecore type name.
if (typeName == null)
typeName = primitive.getName();
property.setIri(typeName);
addTypeMapping(primitive, property);
// ontology.getFrames().add(property);
}
}
}
/**
 * Transforms the given Ecore metamodel into an OWL ontology document
 * using default options.
 *
 * @param metamodel the root package of the metamodel to transform
 * @param targetURI where to save the resulting ontology, or null to skip saving
 * @return the created ontology document
 */
public OntologyDocument transformMetamodel(EPackage metamodel, URI targetURI) {
return transformMetamodel(metamodel, targetURI, Collections.<Ecore2OwlOptions, Object>emptyMap());
}
/**
 * Transforms the given Ecore metamodel into an OWL ontology document.
 *
 * @param metamodel the root package of the metamodel to transform
 * @param targetURI where to save the resulting ontology, or null to skip saving
 * @param options transformation options, see Ecore2OwlOptions
 * @return the created ontology document
 */
public OntologyDocument transformMetamodel(EPackage metamodel, URI targetURI, Map<Ecore2OwlOptions, Object> options) {
this.targetURI = targetURI;
currentMetamodel = metamodel;
OntologyDocument d = owlFactory.createOntologyDocument();
ontology = owlFactory.createOntology();
d.setOntology(ontology);
initDatatypes();
initStandardImports(d, metamodel);
// The ontology URI mirrors the metamodel's namespace URI.
ontology.setUri(metamodel.getNsURI());
propagateMetamodel(metamodel, options);
cleanTransitiveImports(ontology);
// Reuse the metamodel's resource set (if any) when saving the result.
Resource resource = metamodel.eResource();
ResourceSet resourceSet = null;
if (resource != null) {
resourceSet = resource.getResourceSet();
}
if (targetURI != null) {
saveOntology(targetURI, d, resourceSet);
}
return d;
}
/**
 * Removes direct imports that are already reachable transitively through
 * another import. Standard namespaces are always kept as direct imports,
 * even when reachable transitively.
 */
public void cleanTransitiveImports(Ontology ontology) {
// Collect everything reachable through the direct imports.
List<Ontology> transitiveImports = new ArrayList<Ontology>();
EList<Ontology> imports = ontology.getImports();
for (Ontology o : imports) {
transitiveImports.addAll(CrossResourceIRIResolver.theInstance().calculateTransitiveImports(o));
}
List<Ontology> toRemove = new ArrayList<Ontology>();
// Compare by URI, not object identity.
List<String> importsUris = new ArrayList<String>();
for (Ontology i : transitiveImports) {
importsUris.add(i.getUri());
}
for (Ontology imported : imports) {
String importUri = imported.getUri();
if (importsUris.contains(importUri)) {
if (CrossResourceIRIResolver.standardNamespaces.values()
.contains(importUri)) {
// Standard namespaces stay direct imports.
continue;
}
toRemove.add(imported);
}
}
ontology.getImports().removeAll(toRemove);
}
/**
 * Save the given ontology document in a resource with the given target URI.
 *
 * @param targetURI the URI where to save the ontology to
 * @param d the ontology document to save
 * @param resourceSet the resource set to use, or null if a new one shall be created
 */
private void saveOntology(URI targetURI, OntologyDocument d, ResourceSet resourceSet) {
    // Only allocate a fresh resource set when the caller did not supply one
    // (the original code created a ResourceSetImpl unconditionally and then
    // discarded it whenever 'resourceSet' was non-null).
    ResourceSet resourceSetToUse = resourceSet;
    if (resourceSetToUse == null) {
        resourceSetToUse = new ResourceSetImpl();
    }
    Resource documentResource = resourceSetToUse.createResource(targetURI);
    documentResource.getContents().add(d);
    try {
        documentResource.save(null);
    } catch (IOException e) {
        // TODO proper error reporting; keep the original best-effort behavior
        e.printStackTrace();
    }
}
/**
 * Imports a foreign metamodel: transforms its root package into its own
 * ontology document (saved next to the main target with the double
 * extension ".mm.owl"), registers a namespace prefix for it on the
 * importing document, and re-declares all of its named frames under that
 * prefix in the current ontology.
 *
 * @param importedMetamodel the foreign metamodel package to import
 * @return the type mappings produced by transforming the imported metamodel
 */
private HashMap<ENamedElement, Frame> addMetamodelImport(
        EPackage importedMetamodel) {
    EPackage rootPackageOfImport = (EPackage) importedMetamodel.eResource()
            .getContents().get(0);
    Ecore2Owl transformation = new Ecore2Owl();
    String importedMetamodelPrefix = importedMetamodel.getNsPrefix();
    URI importedTargetURI = null;
    if (targetURI != null) {
        importedTargetURI = targetURI.trimSegments(1)
                .appendSegment(importedMetamodel.getName())
                .appendFileExtension("mm").appendFileExtension("owl");
    }
    OntologyDocument importedDocument = transformation.transformMetamodel(
            rootPackageOfImport, importedTargetURI);
    OntologyDocument importingDocument = (OntologyDocument) this.ontology
            .eContainer();
    OwlFactory factory = OwlFactory.eINSTANCE;
    Namespace importNamespace = factory.createNamespace();
    importNamespace.setPrefix(importedMetamodelPrefix + ":");
    importNamespace.setImportedOntology(importedDocument.getOntology());
    importingDocument.getNamespace().add(importNamespace);
    this.ontology.getImports().add(importedDocument.getOntology());
    // Re-declare every named frame of the imported ontology under the prefix.
    EList<Frame> frames = importedDocument.getOntology().getFrames();
    for (Frame frame : frames) {
        if (frame.getIri() != null && frame.getIri().length() > 0) {
            Frame declarationFrame = null;
            if (frame instanceof Class) {
                declarationFrame = factory.createClass();
            } else if (frame instanceof ObjectProperty) {
                declarationFrame = factory.createObjectProperty();
            } else if (frame instanceof DataProperty) {
                declarationFrame = factory.createDataProperty();
            } else if (frame instanceof Datatype) {
                declarationFrame = factory.createDatatype();
            } else if (frame instanceof AnnotationProperty) {
                declarationFrame = factory.createAnnotationProperty();
            } else if (frame instanceof Individual) {
                declarationFrame = factory.createIndividual();
            }
            if (declarationFrame == null) {
                // Frame kind without a named declaration: the original code
                // would have thrown a NullPointerException on setIri here.
                continue;
            }
            declarationFrame.setIri(importedMetamodelPrefix + ":"
                    + frame.getIri());
            ontology.getFrames().add(declarationFrame);
        }
    }
    return transformation.getTypeMappings();
}
/**
 * Returns the Ecore-element-to-OWL-frame mappings accumulated by this
 * transformation (used when this metamodel is imported by another one).
 */
private HashMap<ENamedElement, Frame> getTypeMappings() {
return this.eType2owlClass;
}
/**
 * Registers namespace prefixes on the ontology document: the standard
 * namespaces from CrossResourceIRIResolver, the metamodel's own prefix
 * and the default prefix ":" (both pointing at the metamodel's nsURI).
 * Each namespace references a fresh Ontology stub that only carries the URI.
 */
private void initStandardImports(OntologyDocument d, EPackage metamodel) {
Map<String, String> namespaces = new HashMap<String, String>();
namespaces.putAll(CrossResourceIRIResolver.standardNamespaces);
// namespaces.put("owl2xml", "http://www.w3.org/2006/12/owl2-xml#");
namespaces.put(metamodel.getNsPrefix() + ":", metamodel.getNsURI());
// addSubpackages(metamodel.getNsPrefix() + ":", metamodel.getNsURI(),
// namespaces, metamodel.getESubpackages());
namespaces.put(":", metamodel.getNsURI());
for (String prefix : namespaces.keySet()) {
Namespace namespace = owlFactory.createNamespace();
d.getNamespace().add(namespace);
namespace.setPrefix(prefix);
Ontology o = owlFactory.createOntology();
o.setUri(namespaces.get(prefix));
namespace.setImportedOntology(o);
}
}
// private void addSubpackages(String nsPrefix, String nsURI,
// Map<String, String> namespaces, EList<EPackage> eSubpackages) {
// for (EPackage ePackage : eSubpackages) {
// namespaces.put(nsPrefix + "_" + ePackage.getNsPrefix(), nsURI);
// addSubpackages(nsPrefix + "_" + ePackage.getNsPrefix(), nsURI,
// namespaces, ePackage.getESubpackages());
// }
// }
/**
 * Transforms the given model instances (and their metamodels) into an
 * OWL ontology document using default options.
 */
public OntologyDocument transform(Collection<EObject> eObjects) {
return transform(eObjects, Collections.<Ecore2OwlOptions, Object>emptyMap());
}
/**
 * Transforms the given model instances into OWL individuals. For each
 * object its root metamodel is transformed first, then the instance data
 * is propagated.
 */
public OntologyDocument transform(Collection<EObject> eObjects, Map<Ecore2OwlOptions, Object> options) {
OntologyDocument d = owlFactory.createOntologyDocument();
ontology = owlFactory.createOntology();
initDatatypes();
d.setOntology(ontology);
for (EObject eObject : eObjects) {
// TODO propagate each metamodel only once
// Walk up to the root package so the whole metamodel is covered.
EPackage metamodel = eObject.eClass().getEPackage();
while (metamodel.getESuperPackage() != null) {
metamodel = metamodel.getESuperPackage();
}
propagateMetamodel(metamodel, options);
propagateInstances(eObject);
}
cleanTransitiveImports(ontology);
return d;
}
/**
 * Creates one OWL individual per object CONTAINED in 'eo' (eAllContents()
 * excludes the root object itself) and adds property facts for their
 * feature values. Two passes: first all individuals, then all facts, so
 * facts can reference any individual regardless of containment order.
 * NOTE(review): individual IRIs are derived from Object.hashCode() and are
 * therefore not stable across JVM runs - confirm this is intended.
 * NOTE(review): only single-valued EObject and String feature values
 * produce facts; many-valued features and other datatypes are skipped.
 */
private void propagateInstances(EObject eo) {
HashMap<EObject, Individual> eobject2individual = new HashMap<EObject, Individual>();
TreeIterator<EObject> allContents = eo.eAllContents();
while (allContents.hasNext()) {
EObject instance = allContents.next();
Individual individual = owlFactory.createIndividual();
individual.setIri("eid_" + instance.hashCode());
eobject2individual.put(instance, individual);
// Type the individual with the OWL class of its metaclass.
Class metaclass = (Class) getTypeMapping(instance.eClass());
ClassAtomic metaclassAtomic = owlFactory.createClassAtomic();
metaclassAtomic.setClazz(metaclass);
individual.getTypes().add(metaclassAtomic);
ontology.getFrames().add(individual);
}
// Second pass: feature values become object/data property facts.
allContents = eo.eAllContents();
while (allContents.hasNext()) {
EObject instance = allContents.next();
Individual contextIndividual = eobject2individual.get(instance);
EList<EStructuralFeature> allStructuralFeatures = instance.eClass()
.getEAllStructuralFeatures();
for (EStructuralFeature structuralFeature : allStructuralFeatures) {
Object get = instance.eGet(structuralFeature);
if (get instanceof EObject) {
Individual i = eobject2individual.get(get);
ObjectPropertyFact fact = owlFactory
.createObjectPropertyFact();
fact.setObjectProperty((ObjectProperty) references2objectProperties
.get(structuralFeature));
fact.setIndividual(i);
contextIndividual.getFacts().add(fact);
} else if (get instanceof String) {
DataPropertyFact fact = owlFactory.createDataPropertyFact();
fact.setDataProperty((DataProperty) references2objectProperties
.get(structuralFeature));
AbbreviatedXSDStringLiteral l = owlFactory
.createAbbreviatedXSDStringLiteral();
l.setValue((String) get);
fact.setLiteral(l);
contextIndividual.getFacts().add(fact);
}
}
}
}
/**
 * Transforms all classifiers and features of the given metamodel into OWL
 * frames. Runs in several passes:
 * 1. classes/enums/datatypes, recording the type hierarchy on the way,
 * 2. references and attributes (which need the frames from pass 1),
 * 3. disjointness axioms seeded from the root classes,
 * 4. superclass descriptions, cardinality and user-defined constraints.
 */
private void propagateMetamodel(EPackage metamodel, Map<Ecore2OwlOptions, Object> options) {
TreeIterator<EObject> allContents = metamodel.eAllContents();
while (allContents.hasNext()) {
EObject elem = allContents.next();
if (elem instanceof EClass) {
EClass eClass = (EClass) elem;
transformEClass(eClass);
// Index the (transitive and direct) type hierarchy for later passes.
EList<EClass> superTypes = eClass.getEAllSuperTypes();
addSupertypes(eClass, superTypes);
for (EClass supertype : superTypes) {
addSubtype(supertype, eClass);
}
EList<EClass> directSuperTypes = eClass.getESuperTypes();
for (EClass directSupertype : directSuperTypes) {
addDirectSubtype(directSupertype, eClass);
}
} else if (elem instanceof EEnum) {
transformEEnum((EEnum) elem);
} else if (elem instanceof EDataType) {
transformEDatatype((EDataType) elem);
}
}
// Second pass: features can now refer to the frames created above.
allContents = metamodel.eAllContents();
while (allContents.hasNext()) {
EObject elem = allContents.next();
// Property names are prefixed with the class name unless explicitly disabled.
boolean addPrefix = options.get(Ecore2OwlOptions.PREFIX_PROPERTIES_WITH_CLASSNAME) != Boolean.FALSE;
if (elem instanceof EReference) {
transformEReference((EReference) elem, addPrefix);
}
if (elem instanceof EAttribute) {
transformEAttribute((EAttribute) elem, addPrefix);
}
}
// Collect root classes (no supertypes) as seeds for disjointness axioms.
Set<EClass> allClasses = new HashSet<EClass>();
Set<ENamedElement> types = eType2owlClass.keySet();
for (ENamedElement eNamedElement : types) {
if (eNamedElement instanceof EClass) {
EClass eClass = (EClass) eNamedElement;
if (eClass.getESuperTypes().isEmpty())
allClasses.add(eClass);
}
}
addDisjointSubClasses(allClasses);
// Final pass: superclass conjunctions and constraint classes per EClass.
Set<ENamedElement> keySet = eType2owlClass.keySet();
for (ENamedElement classifier : keySet) {
if (classifier instanceof EClass) {
EClass eclass = (EClass) classifier;
Class owlClass = (Class) getTypeMapping(eclass);
EList<EClass> superTypes = eclass.getESuperTypes();
Conjunction supertypes = owlFactory.createConjunction();
for (EClass superclass : superTypes) {
Class superframe = (Class) getTypeMapping(superclass);
ClassAtomic superClassAtomic = owlFactory
.createClassAtomic();
superClassAtomic.setClazz(superframe);
supertypes.getPrimaries().add(superClassAtomic);
}
addCardinalityConstraintsClasses(eclass, owlClass);
if (supertypes.getPrimaries().size() == 0) {
// Classes without supertypes become direct subclasses of owl:Thing.
Class owlThing = (Class) owlFactory.createClass();
owlThing.setIri("owl:Thing");
ClassAtomic superClassAtomic = owlFactory
.createClassAtomic();
superClassAtomic.setClazz(owlThing);
supertypes.getPrimaries().add(superClassAtomic);
}
addOwlDefinitionSupertypes(owlClass, eclass, ontology);
owlClass.getSuperClassesDescriptions().add(supertypes);
addUserDefinedConstraints(eclass, owlClass);
}
}
}
/**
 * Adds DisjointClasses axioms: the given classes are grouped greedily
 * into maximal sets whose subclass trees do not overlap, and each group
 * of two or more becomes one axiom. Recurses into direct subtypes so
 * sibling subclasses are made disjoint among themselves as well.
 */
private void addDisjointSubClasses(Set<EClass> classes) {
// Remember emitted groups to avoid duplicate axioms.
Set<Set<EClass>> seenSets = new HashSet<Set<EClass>>();
for (EClass eClass : classes) {
Set<EClass> subclasses = getSubclasses(eClass);
Set<EClass> disjoints = new HashSet<EClass>();
disjoints.add(eClass);
List<EClass> directSubtypes = this.directSubtypes.get(eClass);
if (directSubtypes != null && directSubtypes.size() > 1) {
// Sibling subtypes of this class are disjoint among themselves.
Set<EClass> foundDirectSubtypes = new HashSet<EClass>();
foundDirectSubtypes.addAll(directSubtypes);
addDisjointSubClasses(foundDirectSubtypes);
}
for (EClass disjointCandidate : classes) {
Set<EClass> candidateSubclasses = getSubclasses(disjointCandidate);
// no shared subclasses allowed
if (candidateSubclasses.removeAll(subclasses))
continue;
// superclass already included
if (candidateSubclasses.removeAll(disjoints))
continue;
subclasses.addAll(candidateSubclasses);
disjoints.add(disjointCandidate);
disjoints.removeAll(subclasses);
}
if (disjoints.size() > 1) {
if (seenSets.contains(disjoints))
continue;
seenSets.add(disjoints);
DisjointClasses disjointClasses = owlFactory
.createDisjointClasses();
ontology.getFrames().add(disjointClasses);
for (EClass d : disjoints) {
Class owlClass = (Class) getTypeMapping(d);
ClassAtomic classAtomic = owlFactory.createClassAtomic();
classAtomic.setClazz(owlClass);
disjointClasses.getDescriptions().add(classAtomic);
}
}
}
}
/*
private Set<EClass> getSuperclasses(Set<EClass> classes) {
Set<EClass> superclasses = new HashSet<EClass>();
for (EClass subclass : classes) {
superclasses.addAll(getSuperclasses(subclass));
}
return superclasses;
}
private Set<EClass> getSuperclasses(EClass cls) {
List<EClass> foundSupertypes = this.allSupertypes.get(cls);
Set<EClass> superclasses = new HashSet<EClass>();
if (foundSupertypes != null)
superclasses.addAll(foundSupertypes);
return superclasses;
}
*/
/**
 * Returns a fresh, mutable set of all recorded (transitive) subclasses of
 * 'cls'; empty when none are known.
 */
private Set<EClass> getSubclasses(EClass cls) {
    Set<EClass> result = new HashSet<EClass>();
    List<EClass> recorded = this.allSubtypes.get(cls);
    if (recorded == null) {
        return result;
    }
    result.addAll(recorded);
    return result;
}
/**
 * For each OWL_DEFINITION annotation on 'eclass', creates a named class
 * (first detail entry: key = class name, value = OWL definition) declared
 * equivalent to the intersection of the annotated class and the parsed
 * definition. All remaining detail entries (key = error message, value =
 * OWL expression) become negated constraint-marker classes on that type.
 */
private void addOwlDefinitionSupertypes(Class owlClass, EClass eclass,
Ontology ontology) {
EList<EAnnotation> eAnnotations = eclass.getEAnnotations();
for (EAnnotation eAnnotation : eAnnotations) {
if (eAnnotation.getSource().equals(
OWLTransformationConstants.OWL_DEFINITION)
&& eAnnotation.getDetails().size() > 0) {
EMap<String, String> details = eAnnotation.getDetails();
// The first detail entry holds the type definition itself.
Entry<String, String> definitionEntry = eAnnotation
.getDetails().get(0);
String definition = definitionEntry.getValue();
String className = definitionEntry.getKey();
Class typeClass = owlFactory.createClass();
typeClass.setIri(className);
ontology.getFrames().add(typeClass);
Conjunction conjunction = owlFactory.createConjunction();
typeClass.getEquivalentClassesDescriptions().add(conjunction);
ClassAtomic ca = owlFactory.createClassAtomic();
ca.setClazz(owlClass);
conjunction.getPrimaries().add(ca);
OWLParsingHelper oph = new OWLParsingHelper();
Description definitionDescription = oph.parseSubClassOf(
definition, eclass.eResource());
conjunction.getPrimaries().add(definitionDescription);
// All further entries are constraints attached to the defined type.
for (Entry<String, String> entry : details.subList(1,
details.size())) {
String error = entry.getKey();
String constraint = entry.getValue();
Description constraintDescription = oph.parseSubClassOf(
constraint, eclass.eResource());
if (constraintDescription != null) {
// The constraint class describes VIOLATING instances,
// hence the negation.
NestedDescription nestedDescription = owlFactory
.createNestedDescription();
nestedDescription.setDescription(constraintDescription);
nestedDescription.setNot(true);
String iriFragment = OWLTransformationHelper
.createValidIri(error);
String iri = "_constraint_" + iriFragment;
createConstraintClass(typeClass, iri, error,
nestedDescription);
}
}
}
}
}
/**
 * For each OWL_CONSTRAINT annotation on 'eclass', turns every detail
 * entry (key = error message, value = OWL expression) into a negated
 * constraint-marker class attached to the class's OWL counterpart.
 */
private void addUserDefinedConstraints(EClass eclass, Class owlClass) {
EList<EAnnotation> eAnnotations = eclass.getEAnnotations();
for (EAnnotation eAnnotation : eAnnotations) {
if (eAnnotation.getSource().equals(
OWLTransformationConstants.OWL_CONSTRAINT)) {
EMap<String, String> details = eAnnotation.getDetails();
for (Entry<String, String> entry : details) {
String error = entry.getKey();
String constraint = entry.getValue();
OWLParsingHelper oph = new OWLParsingHelper();
Description constraintDescription = oph.parseSubClassOf(
constraint, eclass.eResource());
if (constraintDescription != null) {
// Negated: the constraint class matches violating instances.
NestedDescription nestedDescription = owlFactory
.createNestedDescription();
nestedDescription.setDescription(constraintDescription);
nestedDescription.setNot(true);
String iriFragment = OWLTransformationHelper
.createValidIri(error);
String iri = "_constraint_" + iriFragment;
createConstraintClass(owlClass, iri, error,
nestedDescription);
}
}
}
}
}
/**
 * Adds lower/upper-bound constraint classes for all attributes and
 * references of 'eclass'. Attributes mapped to data properties and those
 * mapped to object properties (EEnum-typed attributes) take different paths.
 */
private void addCardinalityConstraintsClasses(EClass eclass,
Class constrainedClass) {
EList<EAttribute> attributes = eclass.getEAttributes();
for (EAttribute attribute : attributes) {
// DataProperty dataProperty = (DataProperty)
Feature f = references2objectProperties.get(attribute);
if (f instanceof DataProperty)// EAttributes
addCardinalityConstraintsClassesForEAttributes(attribute,
(DataProperty) f, constrainedClass);
if (f instanceof ObjectProperty)// EEnum
addCardinalityConstraintsClassesForEReferenceAndEEnum(
attribute, (ObjectProperty) f, constrainedClass);
}
EList<EReference> references = eclass.getEReferences();
for (EReference reference : references) {
ObjectProperty objectProperty = (ObjectProperty) references2objectProperties
.get(reference);
addCardinalityConstraintsClassesForEReferenceAndEEnum(reference,
objectProperty, constrainedClass);
}
}
/**
 * Adds min/max cardinality constraint classes for a data-typed attribute.
 * A lower bound of 0 / upper bound of -1 (unbounded) needs no constraint.
 * The restriction is negated so the constraint class matches instances
 * that VIOLATE the bound.
 */
private void addCardinalityConstraintsClassesForEAttributes(
EAttribute attribute, DataProperty dataProperty,
Class constrainedClass) {
if (attribute.getLowerBound() != 0) {
ObjectPropertyMin minRestriction = owlFactory
.createObjectPropertyMin();
setFeature(minRestriction, dataProperty);
minRestriction.setValue(attribute.getLowerBound());
// Restrict over the attribute's mapped OWL datatype.
DatatypeReference primary = owlFactory.createDatatypeReference();
Datatype dataType = (Datatype) getTypeMapping(attribute.getEType());
primary.setTheDatatype(dataType);
minRestriction.setDataPrimary(primary);
NestedDescription nestedDescription = owlFactory
.createNestedDescription();
nestedDescription.setDescription(minRestriction);
nestedDescription.setNot(true);
String constraintID = "_min_" + attribute.getLowerBound() + "_"
+ attribute.getName();
String errorMsg = "The minimal cardinality of '"
+ attribute.getLowerBound() + "' for attribute '"
+ attribute.getName() + "' is not satisfied.";
createConstraintClass(constrainedClass, constraintID, errorMsg,
nestedDescription);
}
if (attribute.getUpperBound() != -1) {
ObjectPropertyMax maxRestriction = owlFactory
.createObjectPropertyMax();
setFeature(maxRestriction, dataProperty);
maxRestriction.setValue(attribute.getUpperBound());
DatatypeReference primary = owlFactory.createDatatypeReference();
Datatype dataType = (Datatype) getTypeMapping(attribute.getEType());
primary.setTheDatatype(dataType);
maxRestriction.setDataPrimary(primary);
NestedDescription nestedDescription = owlFactory
.createNestedDescription();
nestedDescription.setDescription(maxRestriction);
nestedDescription.setNot(true);
String iri = "_max_" + attribute.getUpperBound() + "_"
+ attribute.getName();
String errorMsg = "The maximal cardinality of '"
+ attribute.getUpperBound() + "' for attribute '"
+ attribute.getName() + "' is not satisfied.";
createConstraintClass(constrainedClass, iri, errorMsg,
nestedDescription);
}
}
// Adds constraint classes that detect lower/upper bound violations of an
// EReference (or an EEnum-typed feature), and declares inverse object
// properties for bidirectional references. A constraint class is defined
// as: constrainedClass AND NOT(cardinality restriction), so individuals
// classified into it violate the bound.
private void addCardinalityConstraintsClassesForEReferenceAndEEnum(
EStructuralFeature structuralFeature,
ObjectProperty objectProperty, Class constrainedClass) {
// Lower bound 0 needs no minimum-cardinality check.
if (structuralFeature.getLowerBound() != 0) {
ObjectPropertyMin minRestriction = owlFactory
.createObjectPropertyMin();
setFeature(minRestriction, objectProperty);
ClassAtomic classAtomic = owlFactory.createClassAtomic();
classAtomic.setClazz((Class) getTypeMapping(structuralFeature
.getEType()));
minRestriction.setPrimary(classAtomic);
minRestriction.setValue(structuralFeature.getLowerBound());
// Negate the restriction: the constraint class matches individuals that
// do NOT have at least 'lowerBound' values for this property.
NestedDescription nestedDescription = owlFactory
.createNestedDescription();
nestedDescription.setDescription(minRestriction);
nestedDescription.setNot(true);
String iri = "_min_" + structuralFeature.getLowerBound() + "_"
+ structuralFeature.getName();
String errorMsg = "The minimal cardinality of '"
+ structuralFeature.getLowerBound() + "' for reference '"
+ structuralFeature.getName() + "' is not satisfied.";
createConstraintClass(constrainedClass, iri, errorMsg,
nestedDescription);
}
// Upper bound -1 means unbounded ('many'); no maximum check then.
if (structuralFeature.getUpperBound() != -1) {
ObjectPropertyMax maxRestriction = owlFactory
.createObjectPropertyMax();
setFeature(maxRestriction, objectProperty);
ClassAtomic classAtomic = owlFactory.createClassAtomic();
classAtomic.setClazz((Class) getTypeMapping(structuralFeature
.getEType()));
maxRestriction.setPrimary(classAtomic);
maxRestriction.setValue(structuralFeature.getUpperBound());
NestedDescription nestedDescription = owlFactory
.createNestedDescription();
nestedDescription.setDescription(maxRestriction);
nestedDescription.setNot(true);
String iri = "_max_" + structuralFeature.getUpperBound() + "_"
+ structuralFeature.getName();
String errorMsg = "The maximal cardinality of '"
+ structuralFeature.getUpperBound() + "' for reference '"
+ structuralFeature.getName() + "' is not satisfied.";
createConstraintClass(constrainedClass, iri, errorMsg,
nestedDescription);
}
// For bidirectional references, declare the two mapped object properties
// as inverses of each other.
// NOTE(review): if both ends of an EReference pair are transformed, this
// block runs once per end, so each inverse reference is seemingly added
// twice — confirm the duplicate entries are harmless for the reasoner.
if ((structuralFeature instanceof EReference)
&& (((EReference) structuralFeature).getEOpposite() != null)) {
EReference eOpposite = ((EReference) structuralFeature)
.getEOpposite();
ObjectProperty oppositeProperty = (ObjectProperty) references2objectProperties
.get(eOpposite);
ObjectPropertyReference ref = owlFactory
.createObjectPropertyReference();
ref.setObjectProperty(oppositeProperty);
objectProperty.getInverseProperties().add(ref);
ObjectPropertyReference oppRef = owlFactory
.createObjectPropertyReference();
oppRef.setObjectProperty(objectProperty);
oppositeProperty.getInverseProperties().add(oppRef);
}
}
/**
 * Creates a helper class that is equivalent to
 * <code>constrainedClass AND constraintDescription</code> and annotates it
 * with the given error message. Individuals classified into this class by
 * the reasoner are reported as constraint violations.
 *
 * @param constrainedClass      class whose instances are checked
 * @param iriSuffix             human-readable suffix for the generated IRI
 * @param errorMsg              message attached via the constraint
 *                              annotation property
 * @param constraintDescription (usually negated) description that holds
 *                              exactly for violating individuals
 */
private void createConstraintClass(Class constrainedClass,
		String iriSuffix, String errorMsg, Description constraintDescription) {
	Class constraintClass = owlFactory.createClass();
	ontology.getFrames().add(constraintClass);
	// The running counter keeps generated IRIs unique even when the same
	// suffix occurs for several features.
	// Fixed: removed a stray unary '+' that preceded the counter
	// ("+ +constraintCounter++"); it compiled but was a no-op typo.
	constraintClass.setIri(EMFTextPelletReasoner.CONSTRAINT_CLASS_PREFIX
			+ constrainedClass.getIri() + constraintCounter++ + iriSuffix);
	// Attach the error message as an annotation so tooling can surface it.
	Annotation annotation = owlFactory.createAnnotation();
	AbbreviatedXSDStringLiteral stringLiteral = owlFactory
			.createAbbreviatedXSDStringLiteral();
	stringLiteral.setValue(errorMsg);
	LiteralTarget lt = owlFactory.createLiteralTarget();
	lt.setLiteral(stringLiteral);
	AnnotationValue av = owlFactory.createAnnotationValue();
	av.setTarget(lt);
	AnnotationProperty annotationProperty = owlFactory
			.createAnnotationProperty();
	annotationProperty
			.setIri(EMFTextPelletReasoner.CONSTRAINT_PROPERTY_NAME);
	av.setAnnotationProperty(annotationProperty);
	annotation.getAnnotationValues().add(av);
	constraintClass.getAnnotations().add(annotation);
	// constraintClass EquivalentTo: constrainedClass AND constraintDescription
	ClassAtomic constrainedClassAtomic = owlFactory.createClassAtomic();
	constrainedClassAtomic.setClazz(constrainedClass);
	Conjunction and = owlFactory.createConjunction();
	and.getPrimaries().add(constrainedClassAtomic);
	constraintClass.getEquivalentClassesDescriptions().add(and);
	and.getPrimaries().add(constraintDescription);
}
/** Attaches the given feature to a restriction via a FeatureReference wrapper. */
private void setFeature(FeatureRestriction restriction, Feature feature) {
	FeatureReference featureRef = OwlFactory.eINSTANCE.createFeatureReference();
	featureRef.setFeature(feature);
	restriction.setFeatureReference(featureRef);
}
/**
 * Maps an EAttribute into the ontology. EEnum-typed attributes become
 * object properties (enums are mapped to classes elsewhere), all other
 * attributes become data properties whose range is the mapped datatype.
 * The mapping is recorded in references2objectProperties either way.
 */
private void transformEAttribute(EAttribute elem, boolean addPrefix) {
	if (elem.getEAttributeType() instanceof EEnum) {
		// EEnum-typed attribute: object property, enum class as range.
		ObjectProperty o = owlFactory.createObjectProperty();
		o.setIri(OWLTransformationHelper
				.getSimpleFeatureIdentificationIRI(elem, addPrefix));
		ontology.getFrames().add(o);
		Class rangeClass = (Class) getTypeMapping(elem.getEType());
		ClassAtomic rangeClassAtomic = owlFactory.createClassAtomic();
		rangeClassAtomic.setClazz(rangeClass);
		o.getPropertyRange().add(rangeClassAtomic);
		Class domainClass = (Class) getTypeMapping(elem
				.getEContainingClass());
		ClassAtomic domainClassAtomic = owlFactory.createClassAtomic();
		domainClassAtomic.setClazz(domainClass);
		o.getDomain().add(domainClassAtomic);
		// Upper bound 1 is deliberately not marked FUNCTIONAL here; it is
		// checked via cardinality constraint classes instead.
		references2objectProperties.put(elem, o);
	} else {
		// Plain attribute: data property, mapped datatype as range.
		DataProperty d = owlFactory.createDataProperty();
		ontology.getFrames().add(d);
		d.setIri(OWLTransformationHelper
				.getSimpleFeatureIdentificationIRI(elem, addPrefix));
		Class domainClass = (Class) getTypeMapping(elem
				.getEContainingClass());
		ClassAtomic domainClassAtomic = owlFactory.createClassAtomic();
		domainClassAtomic.setClazz(domainClass);
		d.getDomain().add(domainClassAtomic);
		// Fixed: removed a stray no-op statement "elem.getEAttributeType();"
		// — the same getter is invoked again right below, so any lazy proxy
		// resolution it might trigger still happens.
		DatatypeReference dtr = owlFactory.createDatatypeReference();
		Datatype dataType = (Datatype) getTypeMapping(elem
				.getEAttributeType());
		dtr.setTheDatatype(dataType);
		d.getRange().add(dtr);
		// Upper bound 1 is deliberately not marked FUNCTIONAL here; it is
		// checked via cardinality constraint classes instead.
		references2objectProperties.put(elem, d);
	}
}
/**
 * Maps an EReference to an object property whose domain is the mapped
 * containing class and whose range is the mapped target class.
 */
private void transformEReference(EReference elem, boolean addPrefix) {
	ObjectProperty property = owlFactory.createObjectProperty();
	property.setIri(OWLTransformationHelper.getSimpleFeatureIdentificationIRI(elem, addPrefix));
	ontology.getFrames().add(property);
	ClassAtomic rangeAtomic = owlFactory.createClassAtomic();
	rangeAtomic.setClazz((Class) getTypeMapping(elem.getEType()));
	property.getPropertyRange().add(rangeAtomic);
	ClassAtomic domainAtomic = owlFactory.createClassAtomic();
	domainAtomic.setClazz((Class) getTypeMapping(elem.getEContainingClass()));
	property.getDomain().add(domainAtomic);
	// Upper bound 1 is deliberately not marked FUNCTIONAL; cardinality
	// constraint classes perform that check instead.
	references2objectProperties.put(elem, property);
}
/**
 * Maps an EEnum to a class whose superclass is the conjunction of the
 * classes generated for each of its literals.
 */
private void transformEEnum(EEnum elem) {
	Class enumClass = owlFactory.createClass();
	ontology.getFrames().add(enumClass);
	enumClass.setIri(OWLTransformationHelper.getSimpleClassIdentificationIRI(elem));
	addTypeMapping(elem, enumClass);
	Conjunction literalConjunction = owlFactory.createConjunction();
	enumClass.getSuperClassesDescriptions().add(literalConjunction);
	for (EEnumLiteral literal : elem.getELiterals()) {
		// Each literal gets its own class; collect them all in the conjunction.
		transformEEnumLiteral(literal);
		ClassAtomic literalAtomic = owlFactory.createClassAtomic();
		literalAtomic.setClazz((Class) getTypeMapping(literal));
		literalConjunction.getPrimaries().add(literalAtomic);
	}
}
/** Maps a single enum literal to its own OWL class and records the mapping. */
private void transformEEnumLiteral(EEnumLiteral eEnumLiteral) {
	Class literalClass = owlFactory.createClass();
	ontology.getFrames().add(literalClass);
	literalClass.setIri(OWLTransformationHelper.getSimpleClassIdentificationIRI(eEnumLiteral));
	addTypeMapping(eEnumLiteral, literalClass);
}
/** Maps an EDataType to an OWL datatype and records the mapping. */
private void transformEDatatype(EDataType elem) {
	Datatype datatype = owlFactory.createDatatype();
	ontology.getFrames().add(datatype);
	datatype.setIri(OWLTransformationHelper.getSimpleClassIdentificationIRI(elem));
	addTypeMapping(elem, datatype);
}
/** Maps an EClass to an OWL class and records the mapping. */
private void transformEClass(EClass elem) {
	Class owlClass = owlFactory.createClass();
	ontology.getFrames().add(owlClass);
	owlClass.setIri(OWLTransformationHelper.getSimpleClassIdentificationIRI(elem));
	addTypeMapping(elem, owlClass);
}
}
<file_sep>/Extensions/org.emftext.language.sparql.test/src/org/emftext/language/sparql/test/AbstractSparqlTestCase.java
/*******************************************************************************
* Copyright (c) 2006-2012
* Software Technology Group, Dresden University of Technology
* DevBoost GmbH, Berlin, Amtsgericht Charlottenburg, HRB 140026
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Software Technology Group - TU Dresden, Germany;
* DevBoost GmbH - Berlin, Germany
* - initial API and implementation
******************************************************************************/
package org.emftext.language.sparql.test;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Collections;
import java.util.Map;
import junit.framework.TestCase;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.common.util.URI;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.resource.Resource;
import org.eclipse.emf.ecore.resource.ResourceSet;
import org.eclipse.emf.ecore.resource.Resource.Diagnostic;
import org.eclipse.emf.ecore.resource.impl.ResourceSetImpl;
import org.emftext.language.sparql.SparqlQueries;
import org.emftext.language.sparql.resource.sparql.mopp.RqResource;;
/**
 * Base class for SPARQL parser tests. Provides helpers to load '.sparql'
 * files into RqResources and to assert that they parse into a single
 * problem-free SparqlQueries model.
 */
public abstract class AbstractSparqlTestCase extends TestCase {

	/** File extension of SPARQL test input files. */
	private static final String SPARQL_FILE_EXTENSION = ".sparql";

	/**
	 * Asserts that the resource has neither errors nor warnings; diagnostics
	 * are printed first to ease debugging when the assertion fails.
	 */
	private void assertSuccessfulParsing(Resource resource) {
		print(resource.getErrors());
		print(resource.getWarnings());
		assertEquals(0, resource.getErrors().size());
		assertEquals(0, resource.getWarnings().size());
	}

	/** Prints the message of every diagnostic to stdout. */
	private void print(EList<Diagnostic> diagnostics) {
		for (Diagnostic diagnostic : diagnostics) {
			System.out.println(diagnostic.getMessage());
		}
	}

	protected String getFileExtension() {
		return SPARQL_FILE_EXTENSION;
	}

	/** Folder containing the test input files of the concrete subclass. */
	protected abstract String getTestInputFolder();

	protected ResourceSet getResourceSet() {
		return new ResourceSetImpl();
	}

	/**
	 * Loads the resource at the given URI and asserts that it contains
	 * exactly one SparqlQueries root that parsed without problems.
	 */
	protected SparqlQueries loadResource(URI uri) throws IOException {
		RqResource resource = tryToLoadResource(uri);
		assertEquals("The resource should have one content element.", 1,
				resource.getContents().size());
		EObject content = resource.getContents().get(0);
		assertTrue("File '" + uri.toFileString()
				+ "' was parsed to SparqlQueries.",
				content instanceof SparqlQueries);
		SparqlQueries cUnit = (SparqlQueries) content;
		assertNotNull(cUnit);
		assertSuccessfulParsing(cUnit.eResource());
		return cUnit;
	}

	/**
	 * Loads the resource at the given URI without asserting on the result.
	 *
	 * @return the loaded resource, or null if it has no content at all
	 */
	protected RqResource loadResourceWithoutAssert(URI uri) throws IOException {
		RqResource resource = tryToLoadResource(uri);
		if (resource.getContents().size() == 0) {
			return null;
		}
		return resource;
	}

	/**
	 * Loads the resource and prints every load error; the resource is
	 * returned even if it contains errors.
	 */
	protected RqResource tryToLoadResource(URI uri) throws IOException {
		RqResource resource = new RqResource(uri);
		// Type-safe replacement for the raw Collections.EMPTY_MAP.
		resource.load(Collections.emptyMap());
		for (Diagnostic diagnostic : resource.getErrors()) {
			System.out.println("tryToLoadResource(" + uri.lastSegment() + ") found error in resource " + diagnostic.getMessage() + "(" + diagnostic.getLine() + "," + diagnostic.getColumn() + ")");
		}
		return resource;
	}

	private SparqlQueries loadResource(
			String filePath) throws IOException {
		return loadResource(URI.createFileURI(filePath));
	}

	protected SparqlQueries parseResource(String filename,
			String inputFolderName) throws Exception {
		return parseResource(new File(filename), inputFolderName);
	}

	protected SparqlQueries parseResource(File inputFile,
			String inputFolder) throws IOException {
		File file = new File(inputFolder, inputFile.getPath());
		assertTrue("File " + file + " should exist.", file.exists());
		return loadResource(file.getCanonicalPath());
	}

	public RqResource load(File cFile) throws IOException {
		return load(new FileInputStream(cFile));
	}

	/** Loads a resource from the stream; the stream is always closed. */
	public RqResource load(InputStream inputStream) throws IOException {
		try {
			RqResource resource = new RqResource();
			resource.load(inputStream, Collections.emptyMap());
			return resource;
		} finally {
			// Fixed: the stream used to leak when loading threw an exception.
			inputStream.close();
		}
	}

	/** Asserts that the object is non-null and an instance of expectedType. */
	protected void assertType(EObject object, java.lang.Class<?> expectedType) {
		assertTrue("The object should be not empty", object != null);
		assertTrue("The object should have type '"
				+ expectedType.getSimpleName() + "', but was "
				+ object.getClass().getSimpleName(), expectedType
				.isInstance(object));
	}

	protected SparqlQueries assertParsesToSparqlQueries(
			String typename) throws Exception {
		return parsesToSparqlQueries(typename + getFileExtension(), getTestInputFolder());
	}

	protected SparqlQueries parsesToSparqlQueries(
			String filename, String inputFolder) throws Exception {
		EObject model = parseResource(filename, inputFolder);
		assertType(model, SparqlQueries.class);
		if (model instanceof SparqlQueries) {
			return (SparqlQueries) model;
		}
		return null;
	}
}
<file_sep>/Core/org.emftext.language.owl.reasoning/src/org/emftext/language/owl/reasoning/OwlReasoningBuilder.java
/*******************************************************************************
* Copyright (c) 2006-2012
* Software Technology Group, Dresden University of Technology
* DevBoost GmbH, Berlin, Amtsgericht Charlottenburg, HRB 140026
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Software Technology Group - TU Dresden, Germany;
* DevBoost GmbH - Berlin, Germany
* - initial API and implementation
******************************************************************************/
package org.emftext.language.owl.reasoning;
import java.io.IOException;
import java.io.InputStream;
import java.util.Collection;
import java.util.Map;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.common.util.TreeIterator;
import org.eclipse.emf.common.util.URI;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.resource.Resource.Diagnostic;
import org.eclipse.emf.workspace.util.WorkspaceSynchronizer;
import org.emftext.language.owl.IRIIdentified;
import org.emftext.language.owl.resource.owl.IOwlBuilder;
import org.emftext.language.owl.resource.owl.IOwlProblem;
import org.emftext.language.owl.resource.owl.IOwlQuickFix;
import org.emftext.language.owl.resource.owl.IOwlTextDiagnostic;
import org.emftext.language.owl.resource.owl.OwlEProblemSeverity;
import org.emftext.language.owl.resource.owl.OwlEProblemType;
import org.emftext.language.owl.resource.owl.mopp.OwlBuilderAdapter;
import org.emftext.language.owl.resource.owl.mopp.OwlMarkerHelper;
import org.emftext.language.owl.resource.owl.mopp.OwlPlugin;
import org.emftext.language.owl.resource.owl.mopp.OwlResource;
import org.emftext.language.owl.resource.owl.util.OwlStreamUtil;
/**
 * Builder that runs an OWL reasoner over saved OWL files and turns the
 * reported inconsistencies into problem markers on the resource.
 */
public class OwlReasoningBuilder extends OwlBuilderAdapter implements
		IOwlBuilder {

	/** Reasoner used to detect inconsistent frames. */
	private EMFTextOWLReasoner reasoner;

	public OwlReasoningBuilder() {
		super();
		this.reasoner = new EMFTextPelletReasoner();
	}

	/** Creates markers for all errors and warnings of the resource. */
	private void mark(OwlResource resource) {
		mark(resource, resource.getErrors());
		mark(resource, resource.getWarnings());
	}

	private void mark(OwlResource resource, EList<Diagnostic> diagnostics) {
		for (Diagnostic diagnostic : diagnostics) {
			if (diagnostic instanceof IOwlTextDiagnostic) {
				new OwlMarkerHelper().mark(resource,
						(IOwlTextDiagnostic) diagnostic);
			}
		}
	}

	/**
	 * Runs the reasoner on the given OWL document content and attaches an
	 * error problem to every model element whose IRI was reported as
	 * inconsistent. A reasoner failure is reported once on the document
	 * root; any other unexpected exception is only logged.
	 */
	public void validateOWL(String content, OwlResource resource) {
		try {
			Map<String, String> inconsistentOWLObjects;
			try {
				inconsistentOWLObjects = reasoner
						.getInconsistentFrames(content);
			} catch (final Exception e) {
				// The reasoner itself failed: attach one builder error to the
				// document root and stop the validation.
				resource.addProblem(new IOwlProblem() {

					public OwlEProblemType getType() {
						return OwlEProblemType.BUILDER_ERROR;
					}

					public String getMessage() {
						return e.getMessage();
					}

					public Collection<IOwlQuickFix> getQuickFixes() {
						return null;
					}

					public OwlEProblemSeverity getSeverity() {
						return OwlEProblemSeverity.ERROR;
					}
				}, resource.getContents().get(0));
				return;
			}
			// Walk the whole model and attach a problem to each element whose
			// IRI the reasoner reported as inconsistent.
			TreeIterator<EObject> allContents = resource.getAllContents();
			while (allContents.hasNext()) {
				EObject next = allContents.next();
				if (next instanceof IRIIdentified) {
					final IRIIdentified c = ((IRIIdentified) next);
					final String error = inconsistentOWLObjects.get(c.getIri());
					if (error != null) {
						resource.addProblem(new IOwlProblem() {

							public OwlEProblemType getType() {
								return OwlEProblemType.BUILDER_ERROR;
							}

							public String getMessage() {
								return " '" + c.getIri()
										+ "' is inconsistent: " + error;
							}

							public Collection<IOwlQuickFix> getQuickFixes() {
								return null;
							}

							public OwlEProblemSeverity getSeverity() {
								return OwlEProblemSeverity.ERROR;
							}
						}, next);
					}
				}
			}
		} catch (Exception e) {
			OwlPlugin.logError("Exception while reasoning over OWL file.", e);
		}
	}

	public boolean isBuildingNeeded(URI uri) {
		// Reasoning results can depend on imported resources, so always rebuild.
		return true;
	}

	public IStatus handleDeletion(URI uri, IProgressMonitor monitor) {
		return Status.OK_STATUS;
	}

	public IOwlBuilder getBuilder() {
		return OwlReasoningBuilder.this;
	}

	/**
	 * Clears previous builder markers, re-validates the file content with
	 * the reasoner and creates fresh markers for all reported problems.
	 */
	public IStatus build(OwlResource resource, IProgressMonitor monitor) {
		new OwlMarkerHelper().unmark(resource, OwlEProblemType.BUILDER_ERROR);
		IFile file = WorkspaceSynchronizer.getFile(resource);
		try {
			InputStream stream = file.getContents();
			try {
				String content = OwlStreamUtil.getContent(stream);
				validateOWL(content, resource);
				mark(resource);
			} finally {
				// Fixed: the stream was never closed before (resource leak).
				stream.close();
			}
		} catch (CoreException e) {
			OwlPlugin.logError("Exception while reasoning over OWL file.", e);
		} catch (IOException e) {
			OwlPlugin.logError("Exception while reasoning over OWL file.", e);
		}
		return Status.OK_STATUS;
	}
}
<file_sep>/Extensions/org.emftext.runtime.owltext.transformation/src/org/emftext/runtime/owltext/transformation/OWLParsingHelper.java
/*******************************************************************************
* Copyright (c) 2006-2012
* Software Technology Group, Dresden University of Technology
* DevBoost GmbH, Berlin, Amtsgericht Charlottenburg, HRB 140026
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Software Technology Group - TU Dresden, Germany;
* DevBoost GmbH - Berlin, Germany
* - initial API and implementation
******************************************************************************/
package org.emftext.runtime.owltext.transformation;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.util.Collections;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.common.util.URI;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.resource.Resource;
import org.eclipse.emf.ecore.resource.ResourceSet;
import org.eclipse.emf.ecore.resource.impl.ResourceSetImpl;
import org.emftext.language.owl.Class;
import org.emftext.language.owl.Description;
import org.emftext.language.owl.OntologyDocument;
import org.emftext.language.owl.resource.owl.mopp.OwlResource;
import org.emftext.language.owl.resource.owl.mopp.OwlResourceFactory;
/**
 * Helper that parses Manchester-syntax class descriptions by embedding
 * them in a temporary dummy ontology.
 */
public class OWLParsingHelper {

	static {
		// Register the OWL factory so '.owl' resources can also be created
		// outside a running Eclipse instance.
		Resource.Factory.Registry.INSTANCE.getExtensionToFactoryMap().put(
				"owl", new OwlResourceFactory());
	}

	/**
	 * Parses the given description text as the SubClassOf clause of a dummy
	 * class in a dummy ontology and extracts the parsed Description.
	 *
	 * @param superClassDescription description text (parenthesized internally)
	 * @param resource              currently unused; kept for API compatibility
	 * @return the parsed description, or null if parsing failed
	 */
	public Description parseSubClassOf(String superClassDescription,
			Resource resource) {
		try {
			String toParse = "Ontology: <http://dummy/ontology.owl>\n"
					+ "Class: DummyElement \n" + "\tSubClassOf: ";
			toParse += "(" + superClassDescription + ")";
			InputStream inputStream = new ByteArrayInputStream(
					toParse.getBytes());
			// A fresh resource set is used on purpose so the temporary dummy
			// resource does not leak into the caller's resource set.
			ResourceSet resourceSet = new ResourceSetImpl();
			URI tempUri = URI.createFileURI("_" + inputStream.hashCode())
					.appendFileExtension("owl");
			OwlResource res = (OwlResource) resourceSet.createResource(tempUri);
			// Type-safe replacement for the raw Collections.EMPTY_MAP.
			res.load(inputStream, Collections.emptyMap());
			EList<EObject> contents = res.getContents();
			if (contents.size() == 1
					&& contents.get(0) instanceof OntologyDocument) {
				OntologyDocument od = (OntologyDocument) contents.get(0);
				if (od.getOntology().getFrames().size() == 1
						&& od.getOntology().getFrames().get(0) instanceof Class) {
					Class dummyClass = (Class) od.getOntology().getFrames()
							.get(0);
					if (dummyClass.getSuperClassesDescriptions().size() == 1
							&& dummyClass.getSuperClassesDescriptions().get(0) instanceof Description) {
						return (Description) dummyClass
								.getSuperClassesDescriptions().get(0);
					}
				}
			}
		} catch (Exception e) {
			// Parse failures are signalled by the null return value; the
			// stack trace is kept for debugging.
			e.printStackTrace();
		}
		return null;
	}
}
<file_sep>/Core/org.emftext.language.owl.resource.owl/src/org/emftext/language/owl/resource/owl/analysis/OntologyImportsReferenceResolver.java
/*******************************************************************************
* Copyright (c) 2006-2012
* Software Technology Group, Dresden University of Technology
* DevBoost GmbH, Berlin, Amtsgericht Charlottenburg, HRB 140026
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Software Technology Group - TU Dresden, Germany;
* DevBoost GmbH - Berlin, Germany
* - initial API and implementation
******************************************************************************/
package org.emftext.language.owl.resource.owl.analysis;
import java.util.Map;
import org.eclipse.emf.common.util.URI;
import org.eclipse.emf.ecore.resource.Resource;
import org.emftext.language.owl.Ontology;
import org.emftext.language.owl.loading.OntologyLoadExeption;
import org.emftext.language.owl.loading.RemoteLoader;
import org.emftext.language.owl.resource.owl.IOwlReferenceResolveResult;
import org.emftext.language.owl.resource.owl.IOwlReferenceResolver;
import org.emftext.language.owl.resource.owl.analysis.custom.CrossResourceIRIResolver;
/**
 * Resolves 'Import:' statements of an ontology to other Ontology models,
 * delegating the actual loading to the shared remote loader.
 */
public class OntologyImportsReferenceResolver
		implements
		IOwlReferenceResolver<org.emftext.language.owl.Ontology, org.emftext.language.owl.Ontology> {

	/** Shared loader used to fetch ontologies by URI. */
	private RemoteLoader remoteLoader = CrossResourceIRIResolver.theInstance()
			.getRemoteLoader();

	/**
	 * Serializes an imported ontology back to text. Local (file or platform)
	 * resources are referenced by their resource URI, everything else by the
	 * ontology's own URI; both forms are wrapped in angle brackets.
	 */
	public java.lang.String deResolve(
			org.emftext.language.owl.Ontology element,
			org.emftext.language.owl.Ontology container,
			org.eclipse.emf.ecore.EReference reference) {
		Resource containingResource = element.eResource();
		if (containingResource == null) {
			return element.getUri();
		}
		URI resourceUri = containingResource.getURI();
		if (resourceUri.isFile() || resourceUri.isPlatform()) {
			return "<" + resourceUri + ">";
		}
		return "<" + element.getUri() + ">";
	}

	/**
	 * Resolves an import by asking the remote loader for the identified
	 * ontology; load failures become resolve errors.
	 */
	public void resolve(java.lang.String identifier,
			org.emftext.language.owl.Ontology container,
			org.eclipse.emf.ecore.EReference reference, int position,
			boolean resolveFuzzy,
			IOwlReferenceResolveResult<org.emftext.language.owl.Ontology> result) {
		Ontology imported;
		try {
			imported = remoteLoader.loadOntology(identifier, container);
		} catch (OntologyLoadExeption e) {
			result.setErrorMessage(e.getMessage());
			return;
		}
		if (imported == null) {
			return;
		}
		result.addMapping(identifier, imported);
	}

	public void setOptions(Map<?, ?> options) {
		// No options supported.
	}
}
<file_sep>/Extensions/org.emftext.ontomopp.modelsync.test/src/org/emftext/ontomopp/modelsync/test/OWLizerTest.java
/*******************************************************************************
* Copyright (c) 2006-2012
* Software Technology Group, Dresden University of Technology
* DevBoost GmbH, Berlin, Amtsgericht Charlottenburg, HRB 140026
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Software Technology Group - TU Dresden, Germany;
* DevBoost GmbH - Berlin, Germany
* - initial API and implementation
******************************************************************************/
package org.emftext.ontomopp.modelsync.test;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.util.LinkedHashMap;
import java.util.Map;
import org.eclipse.emf.common.util.URI;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.emf.ecore.EcorePackage;
import org.eclipse.emf.ecore.resource.Resource;
import org.eclipse.emf.ecore.resource.ResourceSet;
import org.eclipse.emf.ecore.resource.impl.ResourceSetImpl;
import org.eclipse.emf.ecore.xmi.impl.XMIResourceFactoryImpl;
import org.emftext.language.owl.resource.owl.mopp.OwlResourceFactory;
import org.emftext.runtime.owltext.transformation.Ecore2Owl;
import org.emftext.runtime.owltext.transformation.Ecore2OwlOptions;
import org.junit.Before;
import org.junit.Test;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyManager;
import com.clarkparsia.pellet.owlapiv3.PelletReasoner;
/**
 * Tests the Ecore-to-OWL transformation by transforming sample metamodels
 * and checking that the resulting ontologies are consistent.
 */
public class OWLizerTest {

	private static final String TESTCASE_NAME = "owlizer-test";

	/** OWL API manager used to load the generated ontologies. */
	private OWLOntologyManager manager;

	@Before
	public void setUp() {
		manager = OWLManager.createOWLOntologyManager();
	}

	@Test
	public void transformMetamodels() {
		transformAndCheck("petrinet");
		transformAndCheck("rails");
	}

	private void transformAndCheck(String name) {
		transformToOWL(name);
		loadAndCheckForConsistency(name);
	}

	/** Loads the generated ontology and asserts that it is consistent. */
	private void loadAndCheckForConsistency(String name) {
		URI ontologyUri = getOutputModelURI(TESTCASE_NAME, getOutFileName(name), "owl");
		OWLOntology ontology = new OWLTestHelper().loadOntology(manager, ontologyUri);
		PelletReasoner pellet = new OWLTestHelper().createReasoner(ontology);
		assertTrue("Ontologies must be consistent", pellet.isConsistent());
	}

	/** Runs the Ecore2Owl transformation for the given sample language. */
	private void transformToOWL(String language) {
		ResourceSet resourceSet = getResourceSet();
		Ecore2Owl transformer = new Ecore2Owl();
		// NOTE(review): this options map is built but never handed to the
		// transformer — looks like an oversight; confirm intended behavior.
		Map<Ecore2OwlOptions, Object> options = new LinkedHashMap<Ecore2OwlOptions, Object>();
		options.put(Ecore2OwlOptions.PREFIX_PROPERTIES_WITH_CLASSNAME, Boolean.TRUE);
		String relativePath = "../org.emftext.language." + language + "/metamodel/" + language + ".ecore";
		File metamodelFile = new File(relativePath).getAbsoluteFile();
		Resource metamodelResource = resourceSet.getResource(URI.createFileURI(metamodelFile.getAbsolutePath()), true);
		EPackage metamodel = (EPackage) metamodelResource.getContents().get(0);
		URI targetURI = getOutputModelURI(TESTCASE_NAME, getOutFileName(language), "owl");
		transformer.transformMetamodel(metamodel, targetURI);
	}

	private String getOutFileName(String language) {
		return language + "-metamodel";
	}

	/** Builds output/&lt;testcase&gt;/&lt;model&gt;.&lt;ext&gt;, creating the folder on demand. */
	private URI getOutputModelURI(String testcaseName, String modelName, String extension) {
		File outputFolder = new File("output" + File.separator + testcaseName);
		outputFolder.mkdirs();
		String absolutePath = outputFolder.getAbsolutePath() + File.separator + modelName + "." + extension;
		return URI.createFileURI(absolutePath);
	}

	private ResourceSet getResourceSet() {
		// Touching the EcorePackage instance registers the Ecore language.
		EcorePackage.eINSTANCE.getEClass();
		ResourceSet resourceSet = new ResourceSetImpl();
		Map<String, Object> factories = resourceSet.getResourceFactoryRegistry().getExtensionToFactoryMap();
		factories.put("ecore", new XMIResourceFactoryImpl());
		factories.put("owl", new OwlResourceFactory());
		return resourceSet;
	}
}
<file_sep>/Core/org.emftext.language.owl.resource.owl/src/org/emftext/language/owl/resource/owl/analysis/NamespaceImportedOntologyReferenceResolver.java
/*******************************************************************************
* Copyright (c) 2006-2012
* Software Technology Group, Dresden University of Technology
* DevBoost GmbH, Berlin, Amtsgericht Charlottenburg, HRB 140026
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Software Technology Group - TU Dresden, Germany;
* DevBoost GmbH - Berlin, Germany
* - initial API and implementation
******************************************************************************/
package org.emftext.language.owl.resource.owl.analysis;
import java.util.ArrayList;
import java.util.List;
import org.emftext.language.owl.Ontology;
import org.emftext.language.owl.OntologyDocument;
import org.emftext.language.owl.loading.OntologyLoadExeption;
import org.emftext.language.owl.loading.RemoteLoader;
import org.emftext.language.owl.resource.owl.OwlEProblemType;
import org.emftext.language.owl.resource.owl.analysis.custom.CrossResourceIRIResolver;
import org.emftext.language.owl.resource.owl.mopp.OwlResource;
public class NamespaceImportedOntologyReferenceResolver
implements
org.emftext.language.owl.resource.owl.IOwlReferenceResolver<org.emftext.language.owl.Namespace, org.emftext.language.owl.Ontology> {
// Shared loader used to fetch ontologies that are not already imported.
private RemoteLoader remoteLoader = CrossResourceIRIResolver.theInstance()
.getRemoteLoader();
// Resolves a namespace URI to the ontology it denotes. Lookup order:
// (1) the containing ontology itself, (2) direct and transitive imports,
// (3) as a fallback, the remote loader — which, as a side effect, also
// registers the loaded ontology as a new import of the container.
public void resolve(
java.lang.String identifier,
org.emftext.language.owl.Namespace container,
org.eclipse.emf.ecore.EReference reference,
int position,
boolean resolveFuzzy,
final org.emftext.language.owl.resource.owl.IOwlReferenceResolveResult<org.emftext.language.owl.Ontology> result) {
OntologyDocument ontologyDocument = (OntologyDocument) container
.eContainer();
// Collect direct plus transitive imports as resolution candidates.
List<Ontology> imports = new ArrayList<Ontology>();
imports.addAll(ontologyDocument.getOntology().getImports());
imports.addAll(CrossResourceIRIResolver.theInstance()
.calculateTransitiveImports(ontologyDocument.getOntology()));
// Namespace URIs are expected to end with '#' so that declarations can
// be resolved by IRI; warn (but do not fail) otherwise.
if (!identifier.endsWith("#")) {
((OwlResource) container.eResource())
.addWarning(
"URIs of imported namespaces should end with \"#\", to allow for resolving its declarations by iri",
OwlEProblemType.ANALYSIS_PROBLEM, container);
}
// The namespace may simply point back at the containing ontology.
OntologyDocument document = (OntologyDocument) container.eContainer();
if ((document.getOntology().getUri() + "#").equals(identifier)) {
result.addMapping(identifier, document.getOntology());
return;
}
// Match against each known import, with or without the trailing '#'.
for (Ontology ontology : imports) {
if (identifier.equals(ontology.getUri())
|| identifier.equals(ontology.getUri() + "#")) {
result.addMapping(identifier, ontology);
return;
}
}
// Nothing matched locally: try to load the ontology remotely and, on
// success, record it as an additional import of the container.
if (result.getMappings() == null || result.getMappings().isEmpty()) {
Ontology loadedOntology;
try {
loadedOntology = remoteLoader.loadOntology(identifier,
container);
} catch (OntologyLoadExeption e) {
result.setErrorMessage(e.getMessage());
return;
}
if (loadedOntology != null) {
ontologyDocument.getOntology().getImports().add(loadedOntology);
result.addMapping(identifier, loadedOntology);
}
}
}
public java.lang.String deResolve(
org.emftext.language.owl.Ontology element,
org.emftext.language.owl.Namespace container,
org.eclipse.emf.ecore.EReference reference) {
String uri = element.getUri();
if (!uri.endsWith("#")) {
if (uri.endsWith(">")) {
uri = uri.substring(0, uri.length() - 1);
uri += "#>";
} else {
uri = uri.substring(0, uri.length()) + "#";
}
}
if (!uri.startsWith("<") && !uri.endsWith(">"))
uri = "<" + uri + ">";
return uri;
}
public void setOptions(java.util.Map<?, ?> options) {
// TODO save options in a field or leave method empty if this resolver
// does not depend on any option
}
}
<file_sep>/OWLText Languages/org.owltext.language.petrinets.resource.petrinets/src/org/owltext/language/petrinets/resource/petrinets/mopp/PetrinetsBuilder.java
/*******************************************************************************
* Copyright (c) 2006-2012
* Software Technology Group, Dresden University of Technology
* DevBoost GmbH, Berlin, Amtsgericht Charlottenburg, HRB 140026
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Software Technology Group - TU Dresden, Germany;
* DevBoost GmbH - Berlin, Germany
* - initial API and implementation
******************************************************************************/
/**
* <copyright>
* </copyright>
*
*
*/
package org.owltext.language.petrinets.resource.petrinets.mopp;
import org.emftext.runtime.owltext.OWLTextValidationMarker;
public class PetrinetsBuilder implements org.owltext.language.petrinets.resource.petrinets.IPetrinetsBuilder {

	/** A build is always requested, regardless of the resource URI. */
	public boolean isBuildingNeeded(org.eclipse.emf.common.util.URI uri) {
		return true;
	}

	/**
	 * Runs the OWLText validation marker over the given resource and always
	 * reports success.
	 */
	public org.eclipse.core.runtime.IStatus build(org.owltext.language.petrinets.resource.petrinets.mopp.PetrinetsResource resource, org.eclipse.core.runtime.IProgressMonitor monitor) {
		OWLTextValidationMarker validationMarker = new OWLTextValidationMarker();
		validationMarker.annotateValidationResults(resource);
		return org.eclipse.core.runtime.Status.OK_STATUS;
	}
}
<file_sep>/README.md
OntoMoPP
========
Semantic Web languages and bridges built with EMF and EMFText.
Setup and installation instructions:
*(Setup and installation instructions to be added.)*
<file_sep>/Extensions/org.emftext.language.owlcl/metamodel/owlcl.cs
// Concrete syntax definition for the OWLCL language, built on top of the
// OWL concrete syntax from org.emftext.language.owl.
SYNTAXDEF owlcl
FOR <http://www.emftext.org/language/owlcl>
START OWLCLSpec
// Reuse the OWL syntax for the descriptions embedded in types/constraints.
IMPORTS {
	owl : <http://org.emftext/owl.ecore> WITH SYNTAX owl <../../org.emftext.language.owl/metamodel/owl.cs>
}
OPTIONS {
	licenceHeader ="../../org.dropsbox/licence.txt";
	reloadGeneratorModel = "true";
	usePredefinedTokens = "false";
	generateCodeFromGeneratorModel = "false";
	//defaultTokenName = "IDENTIFIER";
	tokenspace = "1";
	overrideBuilder = "false" ;
	overrideManifest = "false" ;
	disableLaunchSupport = "true";
	disableDebugSupport = "true";
}
// Single-line and multi-line comment tokens.
TOKENS {
	DEFINE SL_COMMENT $'//'(~('\n'|'\r'|'\uffff'))* $ ;
	DEFINE ML_COMMENT $'/*'.*'*/'$ ;
}
// Editor highlighting for string literals and comments.
TOKENSTYLES {
	"STRING_LITERAL" COLOR #2A00FF;
	"SL_COMMENT", "ML_COMMENT" COLOR #00bb00;
}
// Grammar rules (the closing brace of this block is on the following line).
RULES {
	OWLCLSpec ::= "import" constrainedMetamodel[STRING_LITERAL] ("refinements:" "{" types* "}")? constraints*;
	Type ::= "type" name[IRI] "refines" eSuperTypes[IRI] ":" typeDescription;
	Constraint ::= constrainedMetaclass[IRI] "message" errorMsg[STRING_LITERAL] ":" constraintDescription ";";
}<file_sep>/Extensions/org.emftext.language.swrl.test/src/org/emftext/language/swrl/test/NewFileContentTest.java
/*******************************************************************************
* Copyright (c) 2006-2012
* Software Technology Group, Dresden University of Technology
* DevBoost GmbH, Berlin, Amtsgericht Charlottenburg, HRB 140026
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Software Technology Group - TU Dresden, Germany;
* DevBoost GmbH - Berlin, Germany
* - initial API and implementation
******************************************************************************/
package org.emftext.language.swrl.test;
import static org.junit.Assert.assertNotNull;
import org.emftext.language.swrl.resource.swrl.mopp.SwrlMetaInformation;
import org.junit.Test;
public class NewFileContentTest {

	/** The SWRL meta information must provide content for newly created files. */
	@Test
	public void test() {
		SwrlMetaInformation metaInformation = new SwrlMetaInformation();
		String content = metaInformation.getNewFileContentProvider().getNewFileContent("new");
		assertNotNull(content);
	}
}
<file_sep>/Extensions/org.emftext.runtime.owltext.test/metamodel/owlTextTestLanguage.text.cs
// Concrete syntax definition for the OWLText test language
// (feature-model-like metamodel at http://org.owltext/feature).
SYNTAXDEF fea
FOR <http://org.owltext/feature>
START MandatoryFeature, OptionalFeature
OPTIONS {
	licenceHeader ="../../org.dropsbox/licence.txt";
	resourcePluginID="org.emftext.runtime.owltext.test";
	resourceUIPluginID="org.emftext.runtime.owltext.ui.test";
	overrideManifest="false";
	overrideUIManifest="false";
	disableLaunchSupport = "true";
	disableDebugSupport = "true";
}
TOKENS {
	DEFINE COMMENT$'//'(~('\n'|'\r'|'\uffff'))*$;
	DEFINE OPERATOR$'+'|'-'|'*'|'/'$;
	//DEFINE INTEGER$('-')?('1'..'9')('0'..'9')*|'0'$;
	//DEFINE FLOAT$('-')?(('1'..'9') ('0'..'9')* | '0') '.' ('0'..'9')+ $;
}
TOKENSTYLES {
	"+Feature" COLOR #7F0055, BOLD;
	"-Feature" COLOR #7F0055, BOLD;
}
// Each AnyXyz rule wraps its literal in a distinct prefix/suffix pair so the
// parser can distinguish the literal types lexically.
RULES {
	MandatoryFeature ::= "+Feature" name['"','"'] annotation?
		comments['<','>']* anyLiterals* ("{" children* "}")?; // operator[OPERATOR]?
	OptionalFeature ::= "-Feature" name['"','"']
	("{" children* "}")?
	;
	Annotation ::= value['[',']'];
	AnyInt ::= literal['$','$'];
	AnyBigInteger ::= literal['$b','$b'];
	AnyLong ::= literal['$l','$l'];
	AnyShort ::= literal['$s','$s'];
	AnyBigDecimal ::= literal['$d','$d'];
	AnyFloat ::= literal['%','%'];
	AnyDouble ::= literal['%d','%d'];
	AnyBoolean ::= literal['&','&'];
	AnyChar ::= literal['c','c'];
	AnyByte ::= literal['b','b'];
	//AnyDate ::= literal['_','_'];
}<file_sep>/Extensions/org.emftext.language.swrl.util/src/org/emftext/language/swrl/util/SWRLRuleBuilder.java
/*******************************************************************************
* Copyright (c) 2006-2012
* Software Technology Group, Dresden University of Technology
* DevBoost GmbH, Berlin, Amtsgericht Charlottenburg, HRB 140026
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Software Technology Group - TU Dresden, Germany;
* DevBoost GmbH - Berlin, Germany
* - initial API and implementation
******************************************************************************/
package org.emftext.language.swrl.util;
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import org.eclipse.emf.ecore.util.EcoreUtil;
import org.emftext.language.owl.BooleanLiteral;
import org.emftext.language.owl.Class;
import org.emftext.language.owl.ClassAtomic;
import org.emftext.language.owl.DataProperty;
import org.emftext.language.owl.Description;
import org.emftext.language.owl.Feature;
import org.emftext.language.owl.IRIIdentified;
import org.emftext.language.owl.Literal;
import org.emftext.language.owl.ObjectProperty;
import org.emftext.language.owl.OntologyDocument;
import org.emftext.language.swrl.Atom;
import org.emftext.language.swrl.DLiteral;
import org.emftext.language.swrl.DObject;
import org.emftext.language.swrl.DVariable;
import org.emftext.language.swrl.DescriptionAtom;
import org.emftext.language.swrl.DifferentFromAtom;
import org.emftext.language.swrl.IObject;
import org.emftext.language.swrl.IVariable;
import org.emftext.language.swrl.PropertyAtom;
import org.emftext.language.swrl.Rule;
import org.emftext.language.swrl.SWRLDocument;
import org.emftext.language.swrl.SameAsAtom;
import org.emftext.language.swrl.UnknownObject;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLClass;
import org.semanticweb.owlapi.model.OWLDataFactory;
import org.semanticweb.owlapi.model.OWLDataProperty;
import org.semanticweb.owlapi.model.OWLLiteral;
import org.semanticweb.owlapi.model.OWLObjectProperty;
import org.semanticweb.owlapi.model.OWLOntologyManager;
import org.semanticweb.owlapi.model.SWRLAtom;
import org.semanticweb.owlapi.model.SWRLDArgument;
import org.semanticweb.owlapi.model.SWRLIArgument;
import org.semanticweb.owlapi.model.SWRLRule;
/**
* A class that transforms a SWRLDocument into a SWRL rule that can be used
* with the OWL API.
*/
public class SWRLRuleBuilder {

	// OWL API entry points used to create all SWRL/OWL objects.
	private OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
	private OWLDataFactory factory = manager.getOWLDataFactory();

	/**
	 * Converts all rules contained in the given document into OWL API
	 * {@link SWRLRule} instances, in document order.
	 */
	public List<SWRLRule> getRules(SWRLDocument document) {
		List<Rule> rules = document.getRules();
		List<SWRLRule> result = new ArrayList<SWRLRule>();
		for (Rule rule : rules) {
			result.add(getRule(rule));
		}
		return result;
	}

	/** Converts one rule (antecedent body, consequent head) into a SWRLRule. */
	private SWRLRule getRule(Rule rule) {
		List<Atom> body = rule.getAntecedent().getBody();
		List<Atom> head = rule.getConsequent().getBody();
		Set<SWRLAtom> bodyAtoms = getSWRLAtoms(body);
		Set<SWRLAtom> headAtoms = getSWRLAtoms(head);
		return factory.getSWRLRule(bodyAtoms, headAtoms);
	}

	/** Converts a list of atoms; LinkedHashSet keeps the original order. */
	private Set<SWRLAtom> getSWRLAtoms(List<Atom> atoms) {
		Set<SWRLAtom> result = new LinkedHashSet<SWRLAtom>();
		for (Atom atom : atoms) {
			result.add(getSWRLAtom(atom));
		}
		return result;
	}

	/**
	 * Dispatches on the concrete atom type. Description atoms are supported
	 * only with an atomic class description; any unsupported atom is rejected
	 * with a RuntimeException.
	 */
	private SWRLAtom getSWRLAtom(Atom atom) {
		if (atom instanceof DescriptionAtom) {
			DescriptionAtom descriptionAtom = (DescriptionAtom) atom;
			Description description = descriptionAtom.getDescription();
			if (description instanceof ClassAtomic) {
				ClassAtomic classAtomic = (ClassAtomic) description;
				IObject object = descriptionAtom.getObject();
				SWRLIArgument argument = getSWRLArgument(object);
				Class clazz = classAtomic.getClazz();
				OWLClass owlClass = getOWLClass(clazz);
				return factory.getSWRLClassAtom(owlClass, argument);
			}
		} else if (atom instanceof PropertyAtom) {
			return getSWRLObjectPropertyAtom((PropertyAtom) atom);
		} else if (atom instanceof DifferentFromAtom) {
			return getSWRLDifferentFromAtom((DifferentFromAtom) atom);
		} else if (atom instanceof SameAsAtom) {
			return getSWRLSameAsAtom((SameAsAtom) atom);
		}
		throw new RuntimeException("Found unknown atom (" + atom + ") in SWRL rule.");
	}

	/** Converts a differentFrom(a, b) atom. */
	private SWRLAtom getSWRLDifferentFromAtom(DifferentFromAtom atom) {
		IObject objectA = atom.getObjectA();
		IObject objectB = atom.getObjectB();
		SWRLIArgument argumentA = getSWRLArgument(objectA);
		SWRLIArgument argumentB = getSWRLArgument(objectB);
		return factory.getSWRLDifferentIndividualsAtom(argumentA, argumentB);
	}

	/** Converts a sameAs(a, b) atom. */
	private SWRLAtom getSWRLSameAsAtom(SameAsAtom atom) {
		IObject objectA = atom.getObjectA();
		IObject objectB = atom.getObjectB();
		SWRLIArgument argumentA = getSWRLArgument(objectA);
		SWRLIArgument argumentB = getSWRLArgument(objectB);
		return factory.getSWRLSameIndividualAtom(argumentA, argumentB);
	}

	/**
	 * Converts a property atom. Object properties take two I-arguments; data
	 * properties require the target to be a DObject (variable or literal).
	 */
	private SWRLAtom getSWRLObjectPropertyAtom(PropertyAtom propertyAtom) {
		Feature property = propertyAtom.getProperty();
		if (property instanceof ObjectProperty) {
			ObjectProperty objectProperty = (ObjectProperty) property;
			OWLObjectProperty owlObjectProperty = getOWLObjectProperty(objectProperty);
			IObject iObject = propertyAtom.getSource();
			UnknownObject dObject = propertyAtom.getTarget();
			SWRLIArgument argument1 = getSWRLArgument(iObject);
			SWRLIArgument argument2 = getSWRLArgument(dObject);
			return factory.getSWRLObjectPropertyAtom(owlObjectProperty, argument1, argument2);
		} else if (property instanceof DataProperty) {
			DataProperty dataProperty = (DataProperty) property;
			OWLDataProperty owlDataProperty = getOWLDataProperty(dataProperty);
			IObject iObject = propertyAtom.getSource();
			UnknownObject dObject = propertyAtom.getTarget();
			if (dObject instanceof DObject) {
				SWRLIArgument argument1 = getSWRLArgument(iObject);
				SWRLDArgument argument2 = getSWRLArgument((DObject) dObject);
				return factory.getSWRLDataPropertyAtom(owlDataProperty, argument1, argument2);
			}
		}
		throw new RuntimeException("Found unknown property atom (" + property + ") in SWRL rule.");
	}

	/** Builds the OWL API class for the given class frame, using its full IRI. */
	private OWLClass getOWLClass(Class clazz) {
		String fullIRI = getFullIRI(clazz);
		OWLClass owlClass = factory.getOWLClass(IRI.create(fullIRI));
		return owlClass;
	}

	/**
	 * Builds "ontologyUri#localIri" for an IRI-identified element.
	 * NOTE(review): assumes the element's root container is an
	 * OntologyDocument - confirm for elements created outside a document.
	 */
	private String getFullIRI(IRIIdentified object) {
		String iri = object.getIri();
		OntologyDocument rootContainer = (OntologyDocument) EcoreUtil.getRootContainer(object);
		String uri = rootContainer.getOntology().getUri();
		String fullIRI = uri + "#" + iri;
		return fullIRI;
	}

	/** Builds the OWL API object property for the given property frame. */
	private OWLObjectProperty getOWLObjectProperty(ObjectProperty property) {
		String fullIRI = getFullIRI(property);
		OWLObjectProperty owlProperty = factory.getOWLObjectProperty(IRI.create(fullIRI));
		return owlProperty;
	}

	/** Builds the OWL API data property for the given property frame. */
	private OWLDataProperty getOWLDataProperty(DataProperty property) {
		String fullIRI = getFullIRI(property);
		OWLDataProperty owlProperty = factory.getOWLDataProperty(IRI.create(fullIRI));
		return owlProperty;
	}

	/** Converts an I-argument; only variables are supported. */
	private SWRLIArgument getSWRLArgument(UnknownObject object) {
		if (object instanceof IVariable) {
			IVariable variable = (IVariable) object;
			IRI varIRI = IRI.create(variable.getIri());
			return factory.getSWRLVariable(varIRI);
		}
		// Consistency fix: include the offending object in the message, like
		// every other unsupported-input exception in this class.
		throw new RuntimeException("Found unknown IObject (" + object + ") in SWRL rule.");
	}

	/** Converts a D-argument: a variable or a literal. */
	private SWRLDArgument getSWRLArgument(DObject object) {
		if (object instanceof DVariable) {
			DVariable variable = (DVariable) object;
			IRI varIRI = IRI.create(variable.getIri());
			return factory.getSWRLVariable(varIRI);
		} else if (object instanceof DLiteral) {
			DLiteral dLiteral = (DLiteral) object;
			Literal literal = dLiteral.getLiteral();
			OWLLiteral owlLiteral = getOWLLiteral(literal);
			return factory.getSWRLLiteralArgument(owlLiteral);
		}
		throw new RuntimeException("Found unknown DObject (" + object + ") in SWRL rule.");
	}

	/** Converts a literal; only boolean literals are supported. */
	private OWLLiteral getOWLLiteral(Literal literal) {
		if (literal instanceof BooleanLiteral) {
			BooleanLiteral bLiteral = (BooleanLiteral) literal;
			// NOTE(review): getOWLTypedLiteral is deprecated in newer OWL API
			// versions (getOWLLiteral); keep as-is for the bundled version.
			return factory.getOWLTypedLiteral(bLiteral.isValue());
		}
		throw new RuntimeException("Found unknown Literal (" + literal + ") in SWRL rule.");
	}
}
<file_sep>/Core/org.emftext.language.owl.test/src/org/emftext/language/owl/test/resolving/OWLModelComparator.java
package org.emftext.language.owl.test.resolving;
///*******************************************************************************
// * Copyright (c) 2006-2012
// * Software Technology Group, Dresden University of Technology
// * DevBoost GmbH, Berlin, Amtsgericht Charlottenburg, HRB 140026
// *
// * All rights reserved. This program and the accompanying materials
// * are made available under the terms of the Eclipse Public License v1.0
// * which accompanies this distribution, and is available at
// * http://www.eclipse.org/legal/epl-v10.html
// *
// * Contributors:
// * Software Technology Group - TU Dresden, Germany;
// * DevBoost GmbH - Berlin, Germany
// * - initial API and implementation
// ******************************************************************************/
//package org.emftext.language.owl.test.resolving;
//
//import static org.junit.Assert.assertEquals;
//import static org.junit.Assert.assertTrue;
//
//import java.util.Map;
//
//import org.eclipse.emf.ecore.EPackage;
//import org.eclipse.emf.ecore.resource.Resource;
//import org.eclipse.emf.ecore.xmi.impl.XMIResourceFactoryImpl;
//import org.emftext.language.owl.Class;
//import org.emftext.language.owl.ClassAtomic;
//import org.emftext.language.owl.Conjunction;
//import org.emftext.language.owl.Disjunction;
//import org.emftext.language.owl.Ontology;
//import org.emftext.language.owl.OntologyDocument;
//import org.emftext.language.owl.OwlPackage;
//import org.junit.Before;
//import org.junit.Test;
//
//public class OWLModelComparatorTest {
//
// private ModelComparator modelComparator;
//
// private void registerFactories() {
// EPackage.Registry.INSTANCE.put(OwlPackage.eNS_URI, OwlPackage.eINSTANCE);
// Map<String, Object> extensionToFactoryMap = Resource.Factory.Registry.INSTANCE.getExtensionToFactoryMap();
// extensionToFactoryMap.put("owl",new org.emftext.language.owl.resource.owl.mopp.OwlResourceFactory());
// extensionToFactoryMap.put("xmi", new XMIResourceFactoryImpl());
// }
//
// @Before
// public void setUp() {
// modelComparator = new ModelComparator();
// registerFactories();
// }
//
// @Test
// public void testOntologiesAreEqualUsingDiff() {
// assertTrue(modelComparator.areModelsEqualRegardingToDiff("simpleOntology_1.owl",
// "simpleOntology_2.owl"));
// }
//
// @Test
// public void testOntologiesAreEqualUsingMatch() {
// assertTrue(modelComparator.areModelsEqualRegardingToMatch("simpleOntology_1.owl",
// "simpleOntology_2.owl"));
// }
//
// @Test
// public void testReferencedClassIsEqualToRealClass() {
// OntologyDocument ontologyDocument = (OntologyDocument)
// ModelStorageUtil.loadModelFromFileName("simpleOntology_1.owl");
// Ontology ontology = ontologyDocument.getOntology();
// Class superClass = (Class) ontology.getFrames().get(0);
// Class subClass = (Class) ontology.getFrames().get(1);
// Class referencedSuperClass = getFirstSuperClass(subClass);
// assertEquals("Superclass and referenced superclass should be equal", superClass,
// referencedSuperClass);
// }
//
//
//
// private Class getFirstSuperClass(Class subClass) {
// Disjunction disjunction = (Disjunction) subClass.getSuperClassesDescriptions().get(0);
// Conjunction conjunction = (Conjunction) disjunction.getConjunctions().get(0);
// ClassAtomic classAtomic = (ClassAtomic) conjunction.getPrimaries().get(0);
// return classAtomic.getClazz();
// }
//
//}
<file_sep>/Core/org.emftext.language.owl.test/src/org/emftext/language/owl/test/printing/OwlPrintingTest.java
/*******************************************************************************
* Copyright (c) 2006-2012
* Software Technology Group, Dresden University of Technology
* DevBoost GmbH, Berlin, Amtsgericht Charlottenburg, HRB 140026
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Software Technology Group - TU Dresden, Germany;
* DevBoost GmbH - Berlin, Germany
* - initial API and implementation
******************************************************************************/
package org.emftext.language.owl.test.printing;
import static org.junit.Assert.*;
import java.io.IOException;
import java.util.Collections;
import java.util.Map;
import org.eclipse.emf.common.util.URI;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.emf.ecore.resource.Resource;
import org.eclipse.emf.ecore.resource.ResourceSet;
import org.eclipse.emf.ecore.resource.impl.ResourceSetImpl;
import org.emftext.language.owl.Class;
import org.emftext.language.owl.ClassAtomic;
import org.emftext.language.owl.FeatureReference;
import org.emftext.language.owl.Namespace;
import org.emftext.language.owl.NestedDescription;
import org.emftext.language.owl.ObjectProperty;
import org.emftext.language.owl.ObjectPropertyMax;
import org.emftext.language.owl.Ontology;
import org.emftext.language.owl.OntologyDocument;
import org.emftext.language.owl.OwlFactory;
import org.emftext.language.owl.OwlPackage;
import org.emftext.language.owl.impl.OwlFactoryImpl;
import org.emftext.language.owl.resource.owl.mopp.OwlResource;
import org.junit.Test;
public class OwlPrintingTest {

	public OwlPrintingTest() {
		registerFactories();
	}

	/**
	 * Registers the OWL package and the textual OWL resource factory so that
	 * "owl" resources can be created and loaded stand-alone.
	 */
	private void registerFactories() {
		EPackage.Registry.INSTANCE.put(OwlPackage.eNS_URI, OwlPackage.eINSTANCE);
		Map<String, Object> extensionToFactoryMap = Resource.Factory.Registry.INSTANCE.getExtensionToFactoryMap();
		extensionToFactoryMap.put("owl", new org.emftext.language.owl.resource.owl.mopp.OwlResourceFactory());
	}

	/**
	 * Builds an ontology with an empty-prefix namespace, a negated
	 * max-cardinality restriction and feature references, prints it to disk
	 * and re-parses it. The reparse fails if the printed resource is invalid.
	 */
	@Test
	public void testPrintingOfIriReferencesWithEmptyPrefix() throws IOException {
		OwlFactory owlFactory = OwlFactoryImpl.eINSTANCE;
		OntologyDocument od = owlFactory.createOntologyDocument();
		Ontology o = owlFactory.createOntology();
		od.setOntology(o);
		o.setUri("http://owl/printing.test");
		Namespace local = owlFactory.createNamespace();
		local.setImportedOntology(o);
		local.setPrefix(":");
		od.getNamespace().add(local);
		Class employee = owlFactory.createClass();
		employee.setIri("Employee");
		o.getFrames().add(employee);
		Class employeeIdDomain = owlFactory.createClass();
		employeeIdDomain.setIri("EmployeeIdDomain");
		o.getFrames().add(employeeIdDomain);
		ObjectProperty id = owlFactory.createObjectProperty();
		id.setIri("id");
		o.getFrames().add(id);
		// Employee subClassOf not (id max 5 EmployeeIdDomain)
		ObjectPropertyMax objectPropertyMax = owlFactory.createObjectPropertyMax();
		objectPropertyMax.setValue(5);
		// Consistency: use the local owlFactory (the original mixed in a
		// direct OwlFactory.eINSTANCE call here).
		ClassAtomic clazzRef = owlFactory.createClassAtomic();
		clazzRef.setClazz(employeeIdDomain);
		objectPropertyMax.setPrimary(clazzRef);
		FeatureReference featureRef = owlFactory.createFeatureReference();
		featureRef.setFeature(id);
		objectPropertyMax.setFeatureReference(featureRef);
		NestedDescription nestedDescription = owlFactory.createNestedDescription();
		nestedDescription.setNot(true);
		nestedDescription.setDescription(objectPropertyMax);
		employee.getSuperClassesDescriptions().add(nestedDescription);
		ClassAtomic eca = owlFactory.createClassAtomic();
		eca.setClazz(employee);
		id.getPropertyRange().add(eca);
		ClassAtomic eidd = owlFactory.createClassAtomic();
		eidd.setClazz(employeeIdDomain);
		id.getDomain().add(eidd);
		ResourceSet rs = new ResourceSetImpl();
		URI uri = URI.createURI("./out/printingTest.owl");
		Resource newResource = rs.createResource(uri);
		newResource.getContents().add(od);
		// emptyMap() instead of the raw EMPTY_MAP constant (type safety).
		newResource.save(Collections.emptyMap());
		newResource.unload();
		// Reparse test; fails when the generated resource is invalid.
		Resource reloadedResource = rs.getResource(uri, true);
		// assertEquals gives expected/actual diagnostics on failure,
		// unlike assertTrue(x == n).
		assertEquals(1, reloadedResource.getContents().size());
		assertTrue(reloadedResource instanceof OwlResource);
		assertEquals(0, ((OwlResource) reloadedResource).getErrors().size());
	}
}
<file_sep>/Core/org.emftext.language.owl.reasoning/src/org/emftext/language/owl/reasoning/ConsistencyChecker.java
/*******************************************************************************
* Copyright (c) 2006-2012
* Software Technology Group, Dresden University of Technology
* DevBoost GmbH, Berlin, Amtsgericht Charlottenburg, HRB 140026
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Software Technology Group - TU Dresden, Germany;
* DevBoost GmbH - Berlin, Germany
* - initial API and implementation
******************************************************************************/
package org.emftext.language.owl.reasoning;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EObject;
import org.emftext.language.owl.Frame;
import org.emftext.language.owl.IRIIdentified;
import org.emftext.language.owl.OntologyDocument;
import org.emftext.language.owl.loading.OntologyLoadExeption;
import org.emftext.language.owl.resource.owl.IOwlOptionProvider;
import org.emftext.language.owl.resource.owl.IOwlOptions;
import org.emftext.language.owl.resource.owl.IOwlResourcePostProcessor;
import org.emftext.language.owl.resource.owl.IOwlResourcePostProcessorProvider;
import org.emftext.language.owl.resource.owl.OwlEProblemType;
import org.emftext.language.owl.resource.owl.analysis.custom.CrossResourceIRIResolver;
import org.emftext.language.owl.resource.owl.mopp.OwlResource;
public class ConsistencyChecker implements IOwlResourcePostProcessor,
		IOwlResourcePostProcessorProvider, IOwlOptionProvider {

	/** Returns load options that install this checker as post processor. */
	public Map<?, ?> getOptions() {
		HashMap<String, Object> optionMap = new HashMap<String, Object>();
		optionMap.put(IOwlOptions.RESOURCE_POSTPROCESSOR_PROVIDER,
				new ConsistencyChecker());
		return optionMap;
	}

	public ConsistencyChecker() {
		super();
	}

	/** Checks the resource's ontology document, if it has one as root. */
	public void process(OwlResource resource) {
		EObject root = resource.getContents().get(0);
		if (!(root instanceof OntologyDocument)) {
			return;
		}
		checkImportedElements((OntologyDocument) root, resource);
	}

	/**
	 * Warns for every prefixed frame IRI whose target cannot be resolved in
	 * the corresponding imported ontology.
	 */
	private void checkImportedElements(OntologyDocument document, OwlResource resource) {
		CrossResourceIRIResolver resolver = CrossResourceIRIResolver.theInstance();
		for (Frame frame : document.getOntology().getFrames()) {
			String iri = frame.getIri();
			if (iri == null) {
				// anonymous frame, nothing to resolve
				continue;
			}
			if (!resolver.hasPrefix(iri)) {
				continue;
			}
			String prefix = resolver.getPrefix(iri);
			try {
				List<IRIIdentified> candidates = resolver.getOntologyEntity(
						prefix, document, resolver.getId(iri), false);
				if (candidates.isEmpty()) {
					resource.addWarning(
							"The referenced iri-identified element could not be resolved in the imported ontology",
							OwlEProblemType.ANALYSIS_PROBLEM,
							frame);
				}
			} catch (OntologyLoadExeption e) {
				resource.addWarning(e.getMessage(), OwlEProblemType.ANALYSIS_PROBLEM, frame);
			}
		}
	}

	public IOwlResourcePostProcessor getResourcePostProcessor() {
		return new ConsistencyChecker();
	}

	public void terminate() {
		// nothing to clean up
	}
}
<file_sep>/Core/org.emftext.language.owl.reasoning/build.properties
source.. = src/
output.. = bin/
bin.includes = META-INF/,\
plugin.xml,\
lib/pellet-core.jar,\
lib/pellet-datatypes.jar,\
lib/pellet-dig.jar,\
lib/pellet-el.jar,\
lib/pellet-explanation.jar,\
lib/pellet-owlapiv3.jar,\
lib/pellet-pellint.jar,\
lib/aterm-java-1.6.jar,\
lib/pellet-rules.jar,\
.,\
lib/jena/jena-2.6.3.jar,\
lib/owlapiv3/owlapi-bin.jar,\
lib/jgrapht/jgrapht-jdk1.5.jar
<file_sep>/Extensions/org.emftext.runtime.owltext/src/org/emftext/runtime/owltext/LiteralConverter.java
/*******************************************************************************
* Copyright (c) 2006-2012
* Software Technology Group, Dresden University of Technology
* DevBoost GmbH, Berlin, Amtsgericht Charlottenburg, HRB 140026
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Software Technology Group - TU Dresden, Germany;
* DevBoost GmbH - Berlin, Germany
* - initial API and implementation
******************************************************************************/
package org.emftext.runtime.owltext;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import org.emftext.language.owl.AbbreviatedXSDStringLiteral;
import org.emftext.language.owl.Datatype;
import org.emftext.language.owl.FloatingPointLiteral;
import org.emftext.language.owl.IntegerLiteral;
import org.emftext.language.owl.Literal;
import org.emftext.language.owl.OwlFactory;
import org.emftext.language.owl.TypedLiteral;
public class LiteralConverter {
private OwlFactory factory = OwlFactory.eINSTANCE;
private Datatype xsdBoolean;
private Datatype xsdLong;
private Datatype xsdShort;
private Datatype xsdDouble;
private Datatype xsdByte;
private Datatype xsdDate;
private Datatype xsdDecimal;
public LiteralConverter() {
xsdBoolean = factory.createDatatype();
xsdBoolean.setIri("xsd:boolean");
xsdLong = factory.createDatatype();
xsdLong.setIri("xsd:long");
xsdShort = factory.createDatatype();
xsdShort.setIri("xsd:short");
xsdDouble = factory.createDatatype();
xsdDouble.setIri("xsd:double");
xsdByte = factory.createDatatype();
xsdByte.setIri("xsd:byte");
xsdDecimal = factory.createDatatype();
xsdDecimal.setIri("xsd:decimal");
xsdDate = factory.createDatatype();
xsdDate.setIri("xsd:dateTime");
}
public Literal convert(Object newValue) {
if (newValue == null)
return null;
if (newValue instanceof String || newValue instanceof char[]
|| newValue instanceof Character) {
return doConvert(newValue.toString());
} else if (newValue instanceof Integer) {
return doConvert((Integer) newValue);
} else if (newValue instanceof BigInteger) {
return doConvert((BigInteger) newValue);
} else if (newValue instanceof Long) {
return doConvert((Long) newValue);
} else if (newValue instanceof Short) {
return doConvert((Short) newValue);
} else if (newValue instanceof BigDecimal) {
return doConvert((BigDecimal) newValue);
} else if (newValue instanceof Float) {
return doConvert((Float) newValue);
} else if (newValue instanceof Double) {
return doConvert((Double) newValue);
} else if (newValue instanceof Boolean) {
return doConvert((Boolean) newValue);
} else if (newValue instanceof Byte) {
return doConvert((Byte) newValue);
} else if (newValue instanceof Date) {
return doConvert((Date) newValue);
} else {
AbbreviatedXSDStringLiteral textLiteral = factory
.createAbbreviatedXSDStringLiteral();
textLiteral.setValue("The attribute value of type: "
+ newValue.getClass()
+ " could not be converted to a literal");
return textLiteral;
}
}
	/** Wraps the string in an abbreviated XSD string literal. */
	public Literal doConvert(String newValue) {
		AbbreviatedXSDStringLiteral textLiteral = factory
				.createAbbreviatedXSDStringLiteral();
		textLiteral.setValue(newValue);
		return textLiteral;
	}
	/** Wraps the int in an integer literal. */
	public Literal doConvert(Integer newValue) {
		IntegerLiteral textLiteral = factory.createIntegerLiteral();
		textLiteral.setValue(newValue);
		return textLiteral;
	}
	/**
	 * Wraps the BigInteger in an integer literal.
	 * NOTE(review): intValue() silently truncates values outside the int
	 * range - confirm whether out-of-range BigIntegers can occur here.
	 */
	public Literal doConvert(BigInteger newValue) {
		IntegerLiteral textLiteral = factory.createIntegerLiteral();
		textLiteral.setValue(newValue.intValue());
		return textLiteral;
	}
	/** Wraps the float in a floating point literal. */
	public Literal doConvert(Float newValue) {
		FloatingPointLiteral textLiteral = factory.createFloatingPointLiteral();
		textLiteral.setLiteral(newValue);
		return textLiteral;
	}
public Literal doConvert(BigDecimal newValue) {
TypedLiteral textLiteral = factory.createTypedLiteral();
textLiteral.setLexicalValue(newValue.toString());
textLiteral.setTheDatatype(xsdDecimal);
return textLiteral;
}
public Literal doConvert(Boolean newValue) {
TypedLiteral textLiteral = factory.createTypedLiteral();
textLiteral.setLexicalValue(newValue.toString());
textLiteral.setTheDatatype(xsdBoolean);
return textLiteral;
}
public Literal doConvert(Long newValue) {
TypedLiteral textLiteral = factory.createTypedLiteral();
textLiteral.setLexicalValue(newValue.toString());
textLiteral.setTheDatatype(xsdLong);
return textLiteral;
}
/** Encodes a Short as a typed literal carrying the xsd:short datatype. */
public Literal doConvert(Short newValue) {
    TypedLiteral result = factory.createTypedLiteral();
    result.setLexicalValue(newValue.toString());
    result.setTheDatatype(xsdShort);
    return result;
}
/** Encodes a Double as a typed literal carrying the xsd:double datatype. */
public Literal doConvert(Double newValue) {
    TypedLiteral result = factory.createTypedLiteral();
    result.setLexicalValue(newValue.toString());
    result.setTheDatatype(xsdDouble);
    return result;
}
/** Encodes a Byte as a typed literal carrying the xsd:byte datatype. */
public Literal doConvert(Byte newValue) {
    TypedLiteral result = factory.createTypedLiteral();
    result.setLexicalValue(newValue.toString());
    result.setTheDatatype(xsdByte);
    return result;
}
/**
 * Encodes a Date as a typed literal carrying the xsd:date datatype.
 * NOTE(review): Date.toString() produces the fixed JDK form
 * "EEE MMM dd HH:mm:ss zzz yyyy", not xsd:date lexical form, and does not
 * match what createDate's no-arg SimpleDateFormat can parse — confirm the
 * intended wire format for dates.
 */
public Literal doConvert(Date newValue) {
    TypedLiteral textLiteral = factory.createTypedLiteral();
    textLiteral.setLexicalValue(newValue.toString());
    textLiteral.setTheDatatype(xsdDate);
    return textLiteral;
}
/**
 * Converts a literal back into an instance of the requested Java type.
 *
 * @param targetClass the Java type to produce (wrapper or primitive class)
 * @param literal     the literal to decode; null yields null
 * @return the decoded value, or null when literal is null
 * @throws RuntimeException when targetClass is not one of the supported types
 */
@SuppressWarnings("unchecked")
public <T> T reconvert(Class<T> targetClass, Literal literal) {
    if (literal == null) {
        return null;
    }
    // Class objects are unique per class loader, so identity comparison is
    // equivalent to equals() for these checks.
    if (targetClass == String.class) {
        return (T) createString(literal);
    }
    if (targetClass == Character.class || targetClass == char.class) {
        return (T) createCharacter(literal);
    }
    if (targetClass == Integer.class || targetClass == int.class) {
        return (T) createInteger(literal);
    }
    if (targetClass == Boolean.class || targetClass == boolean.class) {
        return (T) createBoolean(literal);
    }
    if (targetClass == BigInteger.class) {
        return (T) createBigInteger(literal);
    }
    if (targetClass == Long.class || targetClass == long.class) {
        return (T) createLong(literal);
    }
    if (targetClass == Short.class || targetClass == short.class) {
        return (T) createShort(literal);
    }
    if (targetClass == BigDecimal.class) {
        return (T) createBigDecimal(literal);
    }
    if (targetClass == Float.class || targetClass == float.class) {
        return (T) createFloat(literal);
    }
    if (targetClass == Double.class || targetClass == double.class) {
        return (T) createDouble(literal);
    }
    if (targetClass == Byte.class || targetClass == byte.class) {
        return (T) createByte(literal);
    }
    if (targetClass == Date.class) {
        return (T) createDate(literal);
    }
    throw new RuntimeException(
            "Conversion error. Requested datatype not supported: "
                    + targetClass);
}
/**
 * Decodes the first character of a string literal.
 * Throws StringIndexOutOfBoundsException for an empty value (unchanged
 * behavior). Character.valueOf replaces the deprecated boxed constructor.
 */
private Character createCharacter(Literal literal) {
    AbbreviatedXSDStringLiteral textLiteral = (AbbreviatedXSDStringLiteral) literal;
    return Character.valueOf(textLiteral.getValue().charAt(0));
}
/** Decodes a string literal back to its stored value. */
private String createString(Literal literal) {
    AbbreviatedXSDStringLiteral textLiteral = (AbbreviatedXSDStringLiteral) literal;
    // Return the stored value directly; copying it via new String(...) was redundant.
    return textLiteral.getValue();
}
/**
 * Decodes a date literal produced by doConvert(Date).
 *
 * doConvert(Date) stores Date.toString() output ("EEE MMM dd HH:mm:ss zzz yyyy");
 * the original no-arg SimpleDateFormat uses the locale's SHORT style and could
 * never parse that, so the round trip always returned null. Parse the actual
 * format, pinned to English month/day names.
 *
 * @return the parsed Date, or null when the lexical value is unparseable
 *         (original contract preserved)
 */
private Date createDate(Literal literal) {
    TypedLiteral tl = (TypedLiteral) literal;
    try {
        return new SimpleDateFormat("EEE MMM dd HH:mm:ss zzz yyyy",
                java.util.Locale.ENGLISH).parse(tl.getLexicalValue());
    } catch (ParseException e) {
        e.printStackTrace();
    }
    return null;
}
/** Decodes a byte literal; Byte.valueOf replaces the deprecated constructor. */
private Byte createByte(Literal literal) {
    TypedLiteral tl = (TypedLiteral) literal;
    return Byte.valueOf(tl.getLexicalValue());
}
/** Decodes a double literal; Double.valueOf replaces the deprecated constructor. */
private Double createDouble(Literal literal) {
    TypedLiteral tl = (TypedLiteral) literal;
    return Double.valueOf(tl.getLexicalValue());
}
/** Decodes a decimal literal into a BigDecimal. */
private BigDecimal createBigDecimal(Literal literal) {
    TypedLiteral typed = (TypedLiteral) literal;
    return new BigDecimal(typed.getLexicalValue());
}
/** Decodes a short literal; Short.valueOf replaces the deprecated constructor. */
private Short createShort(Literal literal) {
    TypedLiteral tl = (TypedLiteral) literal;
    return Short.valueOf(tl.getLexicalValue());
}
/** Decodes a boolean literal; Boolean.valueOf replaces the deprecated constructor. */
private Boolean createBoolean(Literal literal) {
    TypedLiteral tl = (TypedLiteral) literal;
    return Boolean.valueOf(tl.getLexicalValue());
}
/** Decodes a long literal; Long.valueOf replaces the deprecated constructor. */
private Long createLong(Literal literal) {
    TypedLiteral tl = (TypedLiteral) literal;
    return Long.valueOf(tl.getLexicalValue());
}
/** Decodes a floating-point literal; Float.valueOf replaces the deprecated constructor. */
private Float createFloat(Literal literal) {
    FloatingPointLiteral fpl = (FloatingPointLiteral) literal;
    return Float.valueOf(fpl.getLiteral());
}
/**
 * Decodes an integer literal into a BigInteger.
 * BigInteger.valueOf avoids the string round trip the original used
 * (new BigInteger("" + value)).
 */
private BigInteger createBigInteger(Literal literal) {
    IntegerLiteral il = (IntegerLiteral) literal;
    return BigInteger.valueOf(il.getValue());
}
/** Decodes an integer literal; Integer.valueOf replaces the deprecated constructor. */
private Integer createInteger(Literal literal) {
    IntegerLiteral il = (IntegerLiteral) literal;
    return Integer.valueOf(il.getValue());
}
}
| 266843e706e2f3f5e4babf4999f640c2ec8c3dd2 | [
"C#",
"Java",
"Markdown",
"INI"
] | 27 | Java | frostyandy2k/OntoMoPP | bd730e7fca87e79d62162c07374d2fe99d9d8680 | d09a6d4075946262227021d57504fa1b1e74d1c8 |
refs/heads/master | <file_sep>package grpcserver
import (
"github.com/golang/glog"
pb "github.com/peterbradford/gpioscheduler/protos"
"github.com/peterbradford/gpioscheduler/server/gpioscheduler"
"golang.org/x/net/context"
"google.golang.org/grpc"
"net"
//"github.com/golang/protobuf/ptypes/empty"
)
//grpcserver
// gpioSchedulerService implements pb.GPIOSchedulerServiceServer by delegating
// every RPC to the gpioscheduler package.
type gpioSchedulerService struct{}

// newServer returns a fresh, stateless service instance.
func newServer() *gpioSchedulerService {
	return new(gpioSchedulerService)
}
// Health answers liveness probes with a static OK reply.
func (s *gpioSchedulerService) Health(ctx context.Context, request *pb.Empty) (*pb.HealthReply, error) {
	//TODO maybe check rpio?
	reply := pb.HealthReply{Message: "Health Check: OK"}
	return &reply, nil
}
// GetSystemStatus reports the state of every registered pin.
func (s *gpioSchedulerService) GetSystemStatus(ctx context.Context, request *pb.Empty) (*pb.Pins, error) {
	// The delegate already returns (nil, err) on failure, so pass its
	// result pair straight through.
	return gpioscheduler.GetSystemStatus()
}
// TurnOnPin switches the requested pin on and returns its updated state.
func (s *gpioSchedulerService) TurnOnPin(ctx context.Context, request *pb.Pin) (*pb.Pin, error) {
	return gpioscheduler.TurnOn(request.Pin)
}
// TurnOffPin switches the requested pin off and returns its updated state.
func (s *gpioSchedulerService) TurnOffPin(ctx context.Context, request *pb.Pin) (*pb.Pin, error) {
	return gpioscheduler.TurnOff(request.Pin)
}
// SetSchedule installs a watering schedule for a registered pin.
func (s *gpioSchedulerService) SetSchedule(ctx context.Context, request *pb.Schedule) (*pb.Schedule, error) {
	return gpioscheduler.SetSchedule(*request)
}
// GetSchedule returns the schedule currently attached to a pin.
func (s *gpioSchedulerService) GetSchedule(ctx context.Context, request *pb.Pin) (*pb.Schedule, error) {
	return gpioscheduler.GetSchedule(*request)
}
// AddPin registers a new GPIO pin with the scheduler.
func (s *gpioSchedulerService) AddPin(ctx context.Context, request *pb.Pin) (*pb.Pin, error) {
	return gpioscheduler.AddPin(*request)
}
// RemovePin deregisters a GPIO pin and returns its last known state.
func (s *gpioSchedulerService) RemovePin(ctx context.Context, request *pb.Pin) (*pb.Pin, error) {
	return gpioscheduler.RemovePin(*request)
}
// RunGRPC binds the gRPC endpoint to addr and blocks serving requests.
func RunGRPC(addr string) error {
	listener, err := net.Listen("tcp", addr)
	if err != nil {
		return err
	}
	server := grpc.NewServer()
	pb.RegisterGPIOSchedulerServiceServer(server, newServer())
	glog.Infof("GRPC available on %s\n", addr)
	glog.Flush()
	return server.Serve(listener)
}
<file_sep>package gpioscheduler
import (
"container/heap"
"errors"
"fmt"
"github.com/golang/glog"
pb "github.com/peterbradford/gpioscheduler/protos"
"github.com/peterbradford/gpioscheduler/server/config"
"github.com/peterbradford/gpioscheduler/server/priorityqueue"
"github.com/stianeikeland/go-rpio"
"strconv"
"sync"
"time"
)
// physicalPins lists the valid BCM GPIO pin numbers on the board (0-27).
var physicalPins = [28]uint32{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27}

// pins is the in-memory registry of pins managed by this process.
// NOTE(review): accessed from gRPC handlers without synchronization — confirm
// whether concurrent requests are possible in this deployment.
var pins pb.Pins

// pq is the shared schedule queue, guarded by its embedded mutex.
var pq SafePriorityQueue

// SafePriorityQueue pairs the heap-backed queue with a mutex so the scheduler
// goroutine and the request handlers can share it.
type SafePriorityQueue struct {
	pq  priorityqueue.PriorityQueue
	mux sync.Mutex
}
// GetSystemStatus returns the in-memory registry of all known pins.
// NOTE(review): hands out a pointer to shared package state without locking.
var GetSystemStatus = func() (*pb.Pins, error) {
	defer glog.Flush()
	glog.Infof("System Status Called: %v\n", pins)
	return &pins, nil
}
// AddPin registers a new GPIO pin after validating it against the board's
// physical pin numbers. Re-adding an existing pin is a no-op; in both cases
// the stored record is returned.
var AddPin = func(pin pb.Pin) (*pb.Pin, error) {
	defer glog.Flush()
	if !validatePin(pin.Pin) {
		glog.Infof("Request pin:" + strconv.Itoa(int(pin.Pin)) + ", is not a valid pin")
		return nil, errors.New("Request pin:" + strconv.Itoa(int(pin.Pin)) + ", is not a valid pin")
	}
	_, exist := pins.Pins[pin.Pin]
	if !exist {
		pins.Pins[pin.Pin] = &pin
		// %d replaces the original %o verb, which logged pin numbers in octal.
		glog.Infof("Added Pin: %d\n", pin.Pin)
	} else {
		glog.Infof("Pin already exists: %d\n", pin.Pin)
	}
	return pins.Pins[pin.Pin], nil
}
// RemovePin deletes the pin from the registry and returns its last state, or
// an error when the pin was never registered.
// NOTE(review): the pins map is mutated here without synchronization although
// gRPC handlers can run concurrently — confirm before relying on this.
var RemovePin = func(pin pb.Pin) (*pb.Pin, error) {
	defer glog.Flush()
	old, exists := pins.Pins[pin.Pin]
	if exists {
		glog.Infof("Removed Pin: %v\n", old)
		delete(pins.Pins, pin.Pin)
		return old, nil
	} else {
		glog.Infof("Request pin: %d, isn't a registered pin", int(pin.Pin))
		return nil, errors.New("requested pin isn't a registered pin")
	}
}
// validatePin reports whether pin is one of the board's physical GPIO pin
// numbers listed in physicalPins.
func validatePin(pin uint32) bool {
	// Compare against the pin *values*. The original ranged over the slice
	// indices, which only worked because physicalPins happens to hold 0..27.
	for _, p := range physicalPins {
		if pin == p {
			return true
		}
	}
	return false
}
// init parses configuration, allocates the pin registry, and heap-initializes
// the shared priority queue. The rpio hardware open is currently disabled
// (see the commented block below and RunPinServer, which still defers Close).
func init() {
	defer glog.Flush()
	config.ApplicationConfig()
	glog.Info("Initializing the pins collection")
	pins.Pins = make(map[uint32]*pb.Pin)
	glog.Info("Initializing the priorityqueue")
	pq = SafePriorityQueue{pq: make(priorityqueue.PriorityQueue, 0)}
	pq.mux.Lock()
	heap.Init(&pq.pq)
	pq.mux.Unlock()
	//gpioscheduler = make(pb.SprinklerSystem)
	//if err:=rpio.Open(); err != nil {
	//	glog.Info(err)
	//	glog.Flush()
	//	os.Exit(1)
	//}
}
// scheduler is the long-running loop driving timed events. Every 5 seconds it
// peeks the queue and, once the top entry's Priority timestamp has passed,
// rolls that entry forward to its next occurrence and re-heapifies.
// NOTE(review): pq.Read() returns the *last* slice element rather than heap
// index 0, so the entry inspected here may not be the true queue top — verify
// against the priorityqueue package. The fmt.Printf/Println calls look like
// leftover debugging output.
func scheduler() {
	glog.Info("Scheduler starting")
	glog.Flush()
	var val *pb.Schedule
	for {
		pq.mux.Lock()
		//for i := 0; i < pq.pq.Len(); i++ {
		//	fmt.Printf("obj in pq: %v\n", pq.pq[i])
		//}
		if pq.pq.Read() != nil && pq.pq.Read().(*pb.Schedule).Priority < time.Now().UnixNano() {
			val = pq.pq.Read().(*pb.Schedule)
			fmt.Printf("addr: %p\n", &val)
			fmt.Println("turning on sprinkler", val)
			// Advance the fired entry and re-key its DaySchedule slot.
			UpdateTime(*pins.Pins[val.Pin].Schedule, val.NextWatering)
			t, _ := time.Parse(TIME_FORM, val.NextWatering)
			pq.pq.Update(pins.Pins[val.Pin].Schedule, t)
		}
		//TODO get nextWatering
		//pq.pq.Update(pins.Pins[schedule.Pin].Schedule, nextWatering)
		pq.mux.Unlock()
		time.Sleep(time.Second * 5)
	}
}
// RunPinServer blocks forever running the scheduler loop.
// NOTE(review): rpio.Close is deferred here although the matching rpio.Open
// call in init() is commented out — re-enable or remove them together.
func RunPinServer() {
	defer rpio.Close()
	scheduler()
	//defer func() {
	//	glog.Info("Probably closed rpio")
	//}()
}
<file_sep>package main
import (
"google.golang.org/grpc"
"log"
"time"
"golang.org/x/net/context"
pb "github.com/peterbradford/sprinklers/protos"
"github.com/golang/protobuf/ptypes/empty"
)
// healthCheck calls the Health RPC with a 10s deadline and logs the reply;
// the process exits when the RPC fails.
func healthCheck(client pb.SprinklerServiceClient) {
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()
	reply, err := client.Health(ctx, &empty.Empty{})
	if err != nil {
		// Fixed the copy-pasted message that referred to GetFeatures.
		log.Fatalf("%v.Health(_) = _, %v", client, err)
	}
	log.Println(reply)
}
// main dials the sprinkler gRPC endpoint without TLS and runs a single
// health check before closing the connection.
func main() {
	var opts []grpc.DialOption
	opts = append(opts, grpc.WithInsecure())
	conn, err := grpc.Dial("localhost:4321", opts...)
	if err != nil {
		log.Fatalf("fail to dial: %v", err)
	}
	defer conn.Close()
	client := pb.NewSprinklerServiceClient(conn)
	healthCheck(client)
}<file_sep>// app.js
(function() {
    angular.module("app", []).controller("homeController", homeController);

    // Explicit DI annotation keeps injection working after minification,
    // which renames the implicit `mainService` parameter.
    homeController.$inject = ['mainService'];

    /**
     * Controller for the home view: exposes the health-check and add-zone
     * actions backed by mainService.
     */
    function homeController(mainService) {
        var vm = this;
        vm.healthStatus = null;

        // Functions
        vm.getHealth = function() {
            mainService.getHealth()
                .then(function(response) {
                    vm.healthStatus = response;
                    vm.healthStatusSuccess = true;
                });
        };

        vm.addZone = function(zone) {
            var zoneObject = {
                pin: zone
            };
            mainService.addZone(zoneObject)
                .then(function(response) {
                    console.log(response);
                    vm.addZoneStatus = response;
                    vm.addZoneStatusSuccess = true;
                });
        };
    } // end of controller
})(); // end of iife
<file_sep>package grpcgwserver
import (
"net/http"
"golang.org/x/net/context"
"google.golang.org/grpc"
"github.com/golang/glog"
"github.com/grpc-ecosystem/grpc-gateway/runtime"
gw "github.com/peterbradford/gpioscheduler/protos"
)
// RunGRPCGW starts the grpc-gateway REST proxy bound to opts (a host:port
// string) and blocks until the server stops.
// NOTE(review): the upstream gRPC endpoint is hardcoded to localhost:4321 and
// the startup log always claims port 4322 regardless of opts — confirm these
// should be derived from configuration instead.
func RunGRPCGW(opts string) error {
	ctx := context.Background()
	ctx, cancel := context.WithCancel(ctx)
	defer cancel()

	// Marshal JSON with original proto field names and emit zero values.
	mux := runtime.NewServeMux(runtime.WithMarshalerOption(runtime.MIMEWildcard, &runtime.JSONPb{OrigName: true, EmitDefaults: true}))
	optss := []grpc.DialOption{grpc.WithInsecure()}
	err := gw.RegisterGPIOSchedulerServiceHandlerFromEndpoint(ctx, mux, "localhost:4321", optss)
	if err != nil {
		return err
	}

	glog.Infof("REST available on port: %d\n", 4322)
	glog.Flush()

	// Wrap the mux so browsers can call the API cross-origin.
	gwserver := &http.Server{
		Addr:    opts,
		Handler: allowCORS(mux),
	}

	// Shut the HTTP server down once the context is cancelled.
	go func() {
		<-ctx.Done()
		glog.Infof("Shutting down the GRPC-GW server")
		if err := gwserver.Shutdown(context.Background()); err != nil {
			glog.Errorf("Failed to shutdown http server: %v", err)
		}
	}()

	glog.Infof("GRPC-GW starting on %s", opts)
	if err := gwserver.ListenAndServe(); err != http.ErrServerClosed {
		glog.Errorf("Failed to listen and serve: %v", err)
		return err
	}
	return nil
}<file_sep>package main
import (
"github.com/golang/glog"
"github.com/peterbradford/gpioscheduler/server/fileserver"
"github.com/peterbradford/gpioscheduler/server/grpcserver"
"github.com/peterbradford/gpioscheduler/server/grpcgwserver"
"github.com/peterbradford/gpioscheduler/server/gpioscheduler"
)
// grpcGWServer runs the REST gateway; the process exits on failure.
func grpcGWServer() {
	const addr = "localhost:4322"
	if err := grpcgwserver.RunGRPCGW(addr); err != nil {
		glog.Fatal(err)
	}
}
// grpcServer runs the gRPC endpoint; the process exits on failure.
func grpcServer() {
	const addr = "localhost:4321"
	if err := grpcserver.RunGRPC(addr); err != nil {
		glog.Fatal(err)
	}
}
// fileServer serves the static frontend; the process exits on failure.
func fileServer() {
	const port = "42069"
	if err := fileserver.RunFileServer(port); err != nil {
		glog.Fatal(err)
	}
}
// main starts the static file server, the REST gateway and the gRPC endpoint
// in goroutines, then blocks in the GPIO scheduler loop.
func main() {
	go fileServer()
	go grpcGWServer()
	go grpcServer()
	gpioscheduler.RunPinServer()
}<file_sep>package fileserver
import (
"net/http"
"github.com/golang/glog"
"os"
)
// RunFileServer serves the ./frontend directory over HTTP on the given port
// and blocks.
// NOTE(review): glog.Fatal on os.Getwd failure exits the process from inside
// a library function — returning the error would let the caller decide.
func RunFileServer(port string) error {
	dir, err := os.Getwd()
	if err != nil {
		glog.Fatal(err)
	}
	http.Handle("/", http.FileServer(http.Dir(dir+"/frontend")))
	glog.Infof("Serving frontend on localhost:%s\n", port)
	glog.Flush()
	return http.ListenAndServe(":"+port, nil)
}<file_sep>package config
import (
"flag"
"github.com/golang/glog"
)
// configured guards against re-running the one-time setup below.
var configured = false

// ApplicationConfig performs one-time process configuration: parses flags and
// routes glog output to stderr. Safe to call multiple times.
// NOTE(review): the error returned by flag.Lookup(...).Value.Set is ignored.
func ApplicationConfig() {
	if !configured {
		//wdir, err := os.Getwd()
		//if err != nil {
		//	log.Fatal(err)
		//}
		flag.Parse()
		//flag.Lookup("log_dir").Value.Set(wdir + "/logs")
		flag.Lookup("logtostderr").Value.Set("true")
		glog.Info("Initialized glog")
		defer glog.Flush()
	}
	configured = true
}<file_sep>// mainService.js
(function() {
    angular
        .module('app')
        .service('mainService', mainService);

    // Explicit DI annotation keeps injection working after minification,
    // which renames the implicit `$http` parameter.
    mainService.$inject = ['$http'];

    /**
     * HTTP gateway to the gpioscheduler REST API (grpc-gateway on :4322).
     */
    function mainService($http) {
        var url = 'http://localhost:4322/';

        // NOTE(review): assigning to $http.headers has no effect — AngularJS
        // reads request defaults from $http.defaults.headers. Left in place
        // to keep behavior identical; confirm intent before removing.
        $http.headers = {
            'Content-Type': 'application/json; charset=utf-8'
        }

        // GET /health — resolves with the health reply body, or the error.
        this.getHealth = function() {
            return $http.get(url + 'health')
                .then(function(response) {
                    return response.data;
                }, function(error) {
                    return error;
                });
        };

        // POST /pin — registers a new pin; resolves with the created record.
        this.addZone = function(zone) {
            return $http.post(url + 'pin', zone)
                .then(function(response) {
                    return response.data;
                }, function(error) {
                    return error;
                })
        };
    } // end of mainService
})(); // end of iife
<file_sep>package priorityqueue
import (
"container/heap"
pb "github.com/peterbradford/gpioscheduler/protos"
"time"
)
// PriorityQueue is a container/heap-backed queue of *pb.Schedule entries
// ordered by their Priority timestamps (see Less).
type PriorityQueue []*pb.Schedule
// Len reports the number of queued schedules (heap.Interface requirement).
func (pq PriorityQueue) Len() int { return len(pq) }
// Less orders entries by their Priority timestamp (UnixNano). With '>' this
// builds a max-heap: the *latest* time sits at the heap root.
// NOTE(review): the scheduler appears to want the earliest event first; if
// so, this comparison should be '<'. Confirm intent before changing.
func (pq PriorityQueue) Less(i, j int) bool {
	return pq[i].Priority > pq[j].Priority
}
// Pop removes and returns the last slice element; container/heap calls this
// after swapping the root to the end, so via heap.Pop it yields the root.
// Index is set to -1 to mark the schedule as no longer queued.
func (pq *PriorityQueue) Pop() interface{} {
	old := *pq
	n := len(old)
	schedule := old[n-1]
	schedule.Index = -1
	*pq = old[0 : n-1]
	return schedule
}
// Push appends a schedule and records its slice position in Index so Update
// can later locate it for heap.Fix. Use via heap.Push, not directly.
func (pq *PriorityQueue) Push(x interface{}) {
	n := len(*pq)
	schedule := x.(*pb.Schedule)
	schedule.Index = int64(n)
	*pq = append(*pq, schedule)
}
// Read returns the element stored at the *end* of the backing slice without
// removing it, or nil when the queue is empty.
// NOTE(review): for a container/heap the root is at index 0, not len-1 —
// callers using this as "peek top" may be looking at an arbitrary leaf.
// Confirm the intended semantics.
func (pq *PriorityQueue) Read() interface{} {
	if pq.Len() == 0 {
		return nil
	} else {
		old := *pq
		return old[len(old)-1]
	}
}
// Swap exchanges two elements and keeps their Index fields in sync with
// their new slice positions.
func (pq PriorityQueue) Swap(i, j int) {
	pq[i], pq[j] = pq[j], pq[i]
	pq[i].Index = int64(i)
	pq[j].Index = int64(j)
}
// Update re-prioritizes an already-queued schedule to the given time and
// restores the heap invariant around its recorded Index.
func (pq *PriorityQueue) Update(schedule *pb.Schedule, priority time.Time) {
	schedule.Priority = priority.UnixNano()
	heap.Fix(pq, int(schedule.Index))
}<file_sep># GPIO Scheduler
A simple web interface for maintaining gpio pins and setting schedules for gpio pins.
The front end in this case is an example of a sprinkler system. Although this can be
used for any general scheduling of gpio pins.
NOTE: Re-evaluate the TurnOn and TurnOff methods: the relay this program was built
alongside requires 'On' to drive the pin output low (active-low relay).
## Install GO
* brew install go
###### or (preferred)
* https://golang.org/dl/
* e.g.
* $ curl -O https://dl.google.com/go/go1.10.3.linux-amd64.tar.gz
* $ tar -C /usr/local -xzf go1.10.3.linux-amd64.tar.gz
###### then
* add go to paths
* e.g.
* $ export PATH=$PATH:/usr/local/go/bin //added to '/etc/profile' or '$HOME/.profile'
* $ (optional unless you want to use a directory that's NOT under $HOME/go)export GOPATH=$HOME/go //add to $HOME/.profile or $HOME/.bashrc
* $ (optional)export PATH=$PATH:$GOPATH/bin //add to $HOME/.profile or $HOME/.bashrc
## Install Protobuf
* The following tools are needed to build protobuf
* $ sudo apt-get install autoconf automake libtool curl make g++ unzip
* $ mkdir tmp
* $ cd tmp
* $ git clone https://github.com/google/protobuf
* $ cd protobuf
* $ git submodule update --init --recursive
* $ ./autogen.sh
* $ ./configure
* $ make
* $ make check
* $ sudo make install
* $ sudo ldconfig # refresh shared library cache.
By default this will install to /usr/local. If /usr/local/lib is not part of LD_LIBRARY_PATH, you can install to /usr instead. Use the following command to do this:
* $ ./configure --prefix=/usr
## Install protoc, gateway, swagger
* $ go get -u github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway
* $ go get -u github.com/golang/protobuf/protoc-gen-go
## Install gpioscheduler
* $ go get -u github.com/peterbradford/gpioscheduler
## Running the service
* $ cd ./protos
* $ make
* $ cd ..
* $ go run main.go
* you should now be able to use these curl commands and get results
* curl -k http://localhost:4322/health
* curl -k http://localhost:4322/status
* curl -X POST -k http://localhost:4322/pin -d '{"pin":1}'
* curl -k http://localhost:4322/turnon/1
* curl -k http://localhost:4322/turnoff/1
* curl -X POST -k http://localhost:4322/schedule -d '{"pin":1, "daySchedule": {"Monday Aug-27-2018 12:30 MDT":30, "Monday Aug-28-2018 12:45 MDT":30}}'
* curl -X POST -k http://localhost:4322/schedule -d '{"pin":1, "daySchedule": {"Tuesday 09-25-2018 18:23 MDT":30}}'
* curl -k http://localhost:4322/schedule/1
* curl -X DELETE -k http://localhost:4322/pin/1
* you should also be able to go to localhost:42069 and see the index.html
<file_sep>package gpioscheduler
import (
"container/heap"
"errors"
"fmt"
"github.com/golang/glog"
pb "github.com/peterbradford/gpioscheduler/protos"
"math"
"strconv"
"time"
)
// TIME_FORM is the Go reference-time layout used to parse and format all
// schedule timestamps, e.g. "Monday 01-02-2006 15:04 MST".
const TIME_FORM = "Monday 01-02-2006 15:04 MST"

// The relay board used with this project is active-low: the GPIO pin is held
// at 3.3V when idle and driven to 0V to switch a channel on.
// TurnOn marks the given pin as on and returns its updated record, or an
// error for an unregistered pin. The actual GPIO manipulation (rpio) is
// currently commented out; only the in-memory Status flag changes. The
// disabled code drives the pin Low because the relay is active-low.
var TurnOn = func(pin uint32) (*pb.Pin, error) {
	defer glog.Flush()
	_, exists := pins.Pins[pin]
	if exists {
		glog.Infof("Turning On pin: %d", pin)
		//pin := rpio.Pin(zone)
		//pin.Output()
		//glog.Infof("Before state: %o\n", pin.Read())
		//pin.Low()
		//glog.Infof("Current state after change: %o\n", pin.Read())
		pins.Pins[pin].Status = true
		return pins.Pins[pin], nil
	} else {
		glog.Infof("pin %d isn't a registered pin", pin)
		return nil, errors.New("pin " + strconv.Itoa(int(pin)) + " isn't a registered pin")
	}
}
// TurnOff marks the given pin as off and returns its updated record, or an
// error for an unregistered pin. The actual GPIO manipulation (rpio) is
// currently commented out; only the in-memory Status flag changes. The
// disabled code drives the pin High because the relay is active-low.
var TurnOff = func(pin uint32) (*pb.Pin, error) {
	defer glog.Flush()
	_, exists := pins.Pins[pin]
	if exists {
		glog.Infof("Turning Off pin: %d", pin)
		//pin := rpio.Pin(zone)
		//pin.Output()
		//glog.Infof("Before state: %o\n", pin.Read())
		//pin.High()
		//glog.Infof("Current state after change: %o\n", pin.Read())
		pins.Pins[pin].Status = false
		return pins.Pins[pin], nil
	} else {
		glog.Infof("pin %d isn't a registered pin", pin)
		return nil, errors.New("pin " + strconv.Itoa(int(pin)) + " isn't a registered pin")
	}
}
// GetSchedule returns the schedule attached to the given pin. It fails when
// the pin is unknown or when no schedule has been set for it yet.
var GetSchedule = func(pin pb.Pin) (*pb.Schedule, error) {
	defer glog.Flush()
	if _, ok := pins.Pins[pin.Pin]; !ok {
		glog.Infof("Pin %d doesn't exist\n", pin.Pin)
		return nil, errors.New("Pin " + strconv.Itoa(int(pin.Pin)) + " doesn't exist")
	}
	if pins.Pins[pin.Pin].Schedule == nil {
		glog.Infof("no schedule set for pin:%d", pin.Pin)
		return nil, errors.New("no schedule set for pin:" + strconv.Itoa(int(pin.Pin)))
	}
	sched := pins.Pins[pin.Pin].Schedule
	glog.Infof("Get Schedule\tSchedule: %v", sched)
	return sched, nil
}
// SetSchedule installs a watering schedule on an already-registered pin and
// enqueues it for the scheduler loop.
var SetSchedule = func(schedule pb.Schedule) (*pb.Schedule, error) {
	defer glog.Flush()
	if _, ok := pins.Pins[schedule.Pin]; !ok {
		glog.Infof("Pin %d doesn't exist\n", schedule.Pin)
		return nil, errors.New("Pin " + strconv.Itoa(int(schedule.Pin)) + " doesn't exist")
	}
	return addScheduleToPQ(schedule)
}
// addScheduleToPQ stores the schedule on its pin, pushes it onto the shared
// priority queue, computes the next watering time, and records that time on
// the schedule before returning it. Stray debug fmt.Printf calls that
// polluted stdout have been removed.
var addScheduleToPQ = func(schedule pb.Schedule) (*pb.Schedule, error) {
	pins.Pins[schedule.Pin].Schedule = &schedule
	pq.mux.Lock()
	// defer replaces the two explicit Unlock calls; the lock is released on
	// every return path exactly as before.
	defer pq.mux.Unlock()
	heap.Push(&pq.pq, pins.Pins[schedule.Pin].Schedule)
	nextWatering, err := getNextWatering(*pins.Pins[schedule.Pin].Schedule)
	if err != nil {
		return nil, err
	}
	pq.pq.Update(pins.Pins[schedule.Pin].Schedule, nextWatering)
	pins.Pins[schedule.Pin].Schedule.NextWatering = nextWatering.Format(TIME_FORM)
	return pins.Pins[schedule.Pin].Schedule, nil
}
// getNextWatering advances every DaySchedule entry to its next future
// occurrence (via UpdateTime) and returns the earliest of those times.
// The error result is always nil; it is kept for interface stability.
var getNextWatering = func(schedule pb.Schedule) (time.Time, error) {
	// Snapshot the keys first: UpdateTime deletes and re-inserts map entries,
	// and mutating a map while ranging over it can skip or revisit keys
	// nondeterministically in Go.
	keys := make([]string, 0, len(schedule.DaySchedule))
	for k := range schedule.DaySchedule {
		keys = append(keys, k)
	}
	var nextWatering time.Time
	max := int64(math.MaxInt64)
	for _, k := range keys {
		e := UpdateTime(schedule, k)
		if e.UnixNano() < max {
			nextWatering = e
			max = e.UnixNano()
		}
	}
	return nextWatering, nil
}
// UpdateTime advances the schedule entry keyed by stringVal to its next
// occurrence in the future (stepping in whole weeks), re-keys the DaySchedule
// map entry accordingly, and returns the new time. Stray debug fmt.Printf
// calls that polluted stdout have been removed.
// NOTE(review): the parse error is still ignored — an unparseable key yields
// the zero time and a long catch-up loop. Consider surfacing the error.
var UpdateTime = func(schedule pb.Schedule, stringVal string) time.Time {
	t, _ := time.Parse(TIME_FORM, stringVal)
	// Re-key the map entry: drop the stale key but keep its duration value.
	oldTVal := schedule.DaySchedule[stringVal]
	delete(schedule.DaySchedule, stringVal)
	for t.UnixNano() < time.Now().UnixNano() {
		t = t.Add(time.Hour * 24 * 7)
	}
	schedule.DaySchedule[t.Format(TIME_FORM)] = oldTVal
	return t
}
| 22bda9204df27c918c82c24c5dad7ce7615e7721 | [
"JavaScript",
"Go",
"Markdown"
] | 12 | Go | peterbradford/sprinklers | ca731e20d95ef945dd563d93fc58be73bfa37c70 | 9453a9b833efc39b9d4f26fcae97b8bdf22b11fc |
refs/heads/master | <file_sep>package servlet;
import domain.BankAccount;
import domain.Currency;
import domain.User;
import repository.BankAccountRepository;
import repository.UserRepository;
import javax.mail.Message;
import javax.mail.MessagingException;
import javax.mail.Transport;
import javax.mail.internet.InternetAddress;
import javax.mail.internet.MimeMessage;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import java.io.IOException;
import java.math.BigDecimal;
import java.util.Optional;
import java.util.Random;
@WebServlet(name = "registrationServlet",urlPatterns = "/registration")
public class registrationServlet extends HttpServlet {

    /**
     * Handles the registration form: when the e-mail/password pair is not
     * already registered and a freshly generated account number is unused,
     * creates a User plus an empty PLN BankAccount; then redirects to the
     * login page in every case.
     *
     * NOTE(review): the account number is a random value below 10000, so
     * collisions are likely and silently skip creation; the password builder
     * argument below is a redaction placeholder in this snapshot; no input
     * validation is performed on the request parameters; GET is a no-op.
     */
    protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        HttpSession session = request.getSession(true);
        String email = request.getParameter("email");
        String userPassword = request.getParameter("userPassword");
        String firstName = request.getParameter("firstName");
        String lastName = request.getParameter("lastName");
        Optional<User> optionalUser = UserRepository.findByEmail(email, userPassword);
        User user = null;
        BankAccount bankAccount = null;
        if(!optionalUser.isPresent()){
            String accountNumber = ""+(new Random()).nextInt(10000);
            Optional<BankAccount> optionalBankAccount = BankAccountRepository.findBankAccount(accountNumber);
            if (!optionalBankAccount.isPresent()){
                user = User.builder()
                        .firstName(firstName)
                        .lastName(lastName)
                        .email(email)
                        .password(<PASSWORD>)
                        .currency(Currency.PLN)
                        .build();
                bankAccount = BankAccount.builder()
                        .user(user)
                        .currency(Currency.PLN)
                        .accountNumber(accountNumber)
                        .balance(new BigDecimal(0))
                        .freeFunds(new BigDecimal(0))
                        .build();
                UserRepository.saveOrUpdate(user);
                BankAccountRepository.saveOrUpdate(bankAccount);
            }
        }
        ((HttpServletResponse)response).sendRedirect("/login.jsp");
    }

    protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
    }
}
<file_sep>package domain;
import lombok.*;
import javax.persistence.*;
import java.io.Serializable;
@Entity
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
public class User implements Serializable{

    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    Long id;

    String firstName;
    String lastName;
    String email;
    // Set to PLN at registration (see registrationServlet).
    Currency currency;
    // Also compared against the "number" query parameter by activationServlet,
    // effectively serving as the activation token.
    String accountNumber;
    // Becomes true once the user follows the activation link.
    boolean activated;
    // NOTE(review): stored without hashing as far as this code shows.
    String password;

    // Inverse side of the one-to-one owned by BankAccount.user.
    @OneToOne(mappedBy = "user")
    BankAccount bankAccount;
}
<file_sep>package domain;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import javax.persistence.*;
import java.io.Serializable;
import java.math.BigDecimal;
@Entity
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
public class BankAccount implements Serializable{

    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    Long id;

    String accountNumber;
    // Total balance vs. the portion not locked by pending operations.
    BigDecimal balance;
    Currency currency;
    BigDecimal freeFunds;

    // Owning side of the one-to-one relation with User.
    @OneToOne
    @JoinColumn
    User user;
}
<file_sep>package filters;
import domain.User;
import lombok.extern.slf4j.Slf4j;
import mailMethod.SendMail;
import javax.servlet.*;
import javax.servlet.annotation.WebFilter;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import java.io.IOException;
@WebFilter(filterName = "AuthenticationFilter",urlPatterns = "/account.jsp"
        ,dispatcherTypes = DispatcherType.FORWARD)
@Slf4j
public class AuthenticationFilter implements Filter {

    public void destroy() {
    }

    /**
     * Gates access to account.jsp: users with a session and an activated
     * account pass through; users with an unactivated account get a fresh
     * activation mail and are bounced to the login page; everyone else is
     * redirected to the login page.
     */
    public void doFilter(ServletRequest req, ServletResponse resp, FilterChain chain) throws ServletException, IOException {
        HttpSession session = ((HttpServletRequest) req).getSession(false);
        if (session != null && session.getAttribute("user") != null) {
            User user = (User) session.getAttribute("user");
            // Parameterized logging (SLF4J idiom) instead of eager concatenation.
            log.info("Uzytkownik {} posiada sesję", user.getFirstName());
            if (user.isActivated()) {
                chain.doFilter(req, resp);
                session.setAttribute("notActivated", false);
            } else {
                // Re-send the activation mail and bounce back to login.
                SendMail.sendNewMail(user);
                session.setAttribute("notActivated", true);
                ((HttpServletResponse) resp).sendRedirect("/login.jsp");
            }
        } else {
            log.info("Nie ma użytkownika lub sesji");
            ((HttpServletResponse) resp).sendRedirect("/login.jsp");
        }
    }

    public void init(FilterConfig config) throws ServletException {
    }
}
<file_sep>package servlet;
import domain.User;
import javax.mail.*;
import javax.mail.internet.AddressException;
import javax.mail.internet.InternetAddress;
import javax.mail.internet.MimeMessage;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Properties;
@WebServlet(name = "SendMailServlet",urlPatterns = "/sendConfEmail")
public class SendMailServlet extends HttpServlet {

    /**
     * Intended to send the account-confirmation e-mail; the entire SMTP
     * implementation is currently commented out, so this endpoint is a no-op.
     * NOTE(review): the disabled code embeds SMTP credentials inline and
     * builds the activation link as /activationServlet?userId=&lt;id&gt; —
     * move credentials to configuration before re-enabling it.
     */
    protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
//        String to = "<EMAIL>";
//        String from ="<EMAIL>";
////        String host ="localhost";
//        String login = "login";
//        String password = "<PASSWORD>";
//
//
//        Properties properties = System.getProperties();
//        String host = "smtp.gmail.com";
//        properties.put("mail.smtp.starttls.enable","true");
//        properties.put("mail.smtp.host",host);
//        properties.put("mail.smtp.user",from);
//        properties.put("mail.smtp.password",<PASSWORD>);
//        properties.put("mail.smtp.port",587);
//        properties.put("mail.smtp.auth","true");
//        Session session = Session.getInstance(properties, new Authenticator() {
//            @Override
//            protected PasswordAuthentication getPasswordAuthentication() {
//                return new PasswordAuthentication(login,password);
//            }
//        });
//        response.setContentType("text/html");
//
////        HttpSession httpSession = request.getSession(false);
//
//        HttpSession UserSession = ((HttpServletRequest) request).getSession(false);
////        long uId = (Long)httpSession.getAttribute("userId");
//        User user=(User) UserSession.getAttribute("user");
//        Long uId = user.getId();
////        String uAC = user.getAccountNumber();
//
//        PrintWriter printWriter = response.getWriter();
//
//        try{
//            MimeMessage message = new MimeMessage(session);
//            message.setFrom(new InternetAddress(from));
//            message.addRecipient(Message.RecipientType.TO, new InternetAddress(to));
//            message.setSubject("Account confirmation");
//            message.setText("http://localhost:8080/activationServlet?userId=" + uId);
//
//            Transport.send(message);
//            String title = "send emial";
//            String res = "message sent";
//            String docType = "<!doctype html public \"-//w3c//dtd html 4.0 " + "transitional//en\">\n";
//
//        } catch (AddressException e) {
//            e.printStackTrace();
//        } catch (MessagingException e) {
//            e.printStackTrace();
//        }
    }

    protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
    }
}
<file_sep>package servlet;
import domain.User;
import repository.UserRepository;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.Optional;
@WebServlet(name = "activationServlet",urlPatterns = "/activationServlet")
public class activationServlet extends HttpServlet {

    protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
    }

    /**
     * Activates a user account from the e-mailed link:
     * /activationServlet?userId=&lt;id&gt;&amp;number=&lt;accountNumber&gt;.
     * Marks the user activated when the supplied number matches the stored
     * account number, then persists the change.
     *
     * NOTE(review): Long.parseLong throws on a malformed userId; nothing is
     * ever written to the response in any branch; the debug println should be
     * removed; a state-changing GET is unsafe against link prefetching.
     */
    protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        String userId = request.getParameter("userId");
        String number = request.getParameter("number");
        Long id = Long.parseLong(userId);
        Optional<User> optionalUser = UserRepository.findUser(id);
        User user = null;
        if(optionalUser.isPresent()){
            user=optionalUser.get();
            String userNumber = user.getAccountNumber();
            if(userNumber.equals(number)){
                user.setActivated(true);
                UserRepository.saveOrUpdate(user);
                System.out.println("yupi ja ej mother fucker");
            }
        }
    }
}
| 5b5bd5638f30ff6da822d363c3adf483df5c010c | [
"Java"
] | 6 | Java | ZayldeCurt2/BankWebApp | f6c97c327673ed7b06255457f6fdb0b280dd035a | 566245a85f9ff01eb2ab08976a7daed1922c63fc |
refs/heads/master | <file_sep># EXPLORERS-SYNERGY-
STACK - Nodejs, Mongodb.
Project based on CRUD operations
<file_sep>class User {
/**
* Constructor
* @param userId
* @param firstName
* @param lastName
* @param email
* @param address1
* @param address2
* @param city
* @param state
* @param zipCode
* @param country
*/
constructor(userId, firstName, lastName, email, address1, address2, city, state, zipCode, country) {
this._userId = userId;
this._firstName = firstName;
this._lastName = lastName;
this._email = email;
this._address1 = address1;
this._address2 = address2;
this._city = city;
this._state = state;
this._zipCode = zipCode;
this._country = country;
}
/**
*
* Getter and Setters
*/
get userId() {
return this._userId;
}
set userId(value) {
this._userId = value;
}
get firstName() {
return this._firstName;
}
set firstName(value) {
this._firstName = value;
}
get lastName() {
return this._lastName;
}
set lastName(value) {
this._lastName = value;
}
get email() {
return this._email;
}
set email(value) {
this._email = value;
}
get address1() {
return this._address1;
}
set address1(value) {
this._address1 = value;
}
get address2() {
return this._address2;
}
set address2(value) {
this._address2 = value;
}
get city() {
return this._city;
}
set city(value) {
this._city = value;
}
get state() {
return this._state;
}
set state(value) {
this._state = value;
}
get zipCode() {
return this._zipCode;
}
set zipCode(value) {
this._zipCode = value;
}
get country() {
return this._country;
}
set country(value) {
this._country = value;
}
/* get userProfile(){
return this._userProfile;
}
set userProfile(value){
this._userProfile = value;
} */
}
module.exports = User;
<file_sep>var connection = require('../model/connection');
//HARDCODED DATA
var data =[
{
'connectionID': 1,
'connectionName':'<NAME>',
'connectionType':'esports',
'hostName':'Naidu',
'imgUrl':'/images/axe.jpg',
'startTime':'17-10-2019 10:30am',
'endTime':'17-10-2019 10:30pm',
'location':'216 Barton Creek Drive',
'description':'Dota 2 is a multiplayer online battle arena (MOBA) video game in which two teams of five players compete to collectively destroy a large structure defended by the opposing team known as the "Ancient", whilst defending their own. This group is for Dota Newbee\'s. We will teach you tips and tricks of playing Dota2 free of money. Our main motto is to prepare people for international events'
},
{
'connectionID': 2,
'connectionName': '<NAME>',
'connectionType':'esports',
'hostName':'vamsi',
'imgUrl':'/images/lol.jpg',
'startTime':'20-10-2019 10:30am',
'endTime':'20-10-2019 10:30am',
'location':'130 woodward, UNC, charlotte, NC,28262',
'description':'Whether you\'re playing Solo or Co-op with friends, League of Legends is a highly competitive, fast paced action-strategy game designed for those who crave a hard fought victory. If you think you are strong enough please join our group and lets LOL whole day. Note:"if your MMR is less that 2,800 please don\'t attend this event."'
},
{
'connectionID':3,
'connectionName':'<NAME>',
'connectionType':'esports',
'hostName':'sankova',
'imgUrl':'/images/invoker.jpg',
'startTime':'18-10-2019 10:30am',
'endTime':'18-10-2019 10:30pm',
'location':'220 Barton creek Drive',
'description':'This event is solely for pro players in Dota, please makesure you MMR must be above 3000 to attend this event.Free drinks. You are advised to bring your own food. Invoker players have bonus perks.'
},
{
'connectionID':4,
'connectionName':'Football addicts',
'connectionType':'sports',
'hostName':'<NAME>',
'imgUrl':'/images/americanfootball.jpg',
'startTime':'18-10-2019 10:30am',
'endTime':'18-10-2019 10:30am',
'location':'benkirk stadium, UNC, Charlotte, NC.',
'description':'All the football lovers are invited. Free food and drinks. Everyone is adviced to carry their own football gear. You are not allowed to play if you gear doesn\'t meet our saftey requirements. Be good to your fellow mates. Be part of this event if you want to have a great day.'
},
{
'connectionID':5,
'connectionName':'Tennis tricks and tips',
'connectionType':'sports',
'hostName':'Jonathan',
'imgUrl':'/images/tennis.jpg',
'startTime':'18-10-2019 10:30am',
'endTime':'18-10-2019 10:30pm',
'location':'frisconatus ground, texas,73415',
'description':'I\'m coach jonathan. I\'ll teach you tips and tricks in tennis. If you like my way of teaching you can join my academy. I\'m not expecting everyone who attend\'s this event to join my academy. It\'s you decision whether to take or not. If you really interested in tennis, Don\'t worry I\'ll be there for you. Bonus: Free academy membership for 2 early birds.'
},
{
'connectionID':6,
'connectionName':'<NAME>',
'connectionType':'sports',
'hostName':'madonna',
'imgUrl':'/images/raquetball.jpg',
'startTime':'18-10-2019 10:30am',
'endTime':'18-10-2019 10:30pm',
'location':'nekamoora Indoor stadium',
'description':'looking for raquet ball enthusiast\'s. we have newly started Nekamoora indoor stadium which consists of 10 Raquet ball stadiums. Raquet ball gear is provided for free. Everything is free on this inauguration day. Free Lunchhh!!!'
}
];
// Materialize every hard-coded record as a Connection model object.
module.exports.getConnections = function () {
    return data.map(function (record) {
        return connection.connection(
            record.connectionID,
            record.connectionName,
            record.connectionType,
            record.hostName,
            record.imgUrl,
            record.startTime,
            record.endTime,
            record.location,
            record.description);
    });
};
// Look up one record by id and wrap it in a Connection model.
// `connectionID` typically arrives as a string route parameter, so the loose
// equality (==) against the numeric record id is intentional.
// Returns undefined when no record matches.
//
// Changes from the previous version: dropped the four console.log debug
// statements that ran on every request, and dropped the outer
// `connectionID <= data.length` guard, which wrongly assumed ids are the
// sequential 1..N positions of the array rather than arbitrary values.
module.exports.getConnection = function (connectionID) {
    for (let i = 0; i < data.length; i++) {
        if (parseInt(data[i].connectionID) == connectionID) {
            return connection.connection(
                data[i].connectionID,
                data[i].connectionName,
                data[i].connectionType,
                data[i].hostName,
                data[i].imgUrl,
                data[i].startTime,
                data[i].endTime,
                data[i].location,
                data[i].description);
        }
    }
    return;
};
/**
 * Links a user to a connection (event) together with the user's RSVP.
 *
 * Fix: the original declared `get response()` twice; the duplicate silently
 * replaced the first definition and is removed here. The oddly named legacy
 * accessors (itemCode/itemName/catalogCategory — apparently copied from a
 * catalog domain) are kept for existing callers, with properly named aliases
 * added alongside as a backward-compatible addition.
 */
class Userconnection {
    /**
     * @param connectionID   numeric id of the connection
     * @param connectionName display name of the connection
     * @param connectionType category string (e.g. 'sports', 'esports')
     * @param response       the user's RSVP ('yes' / 'no')
     */
    constructor(connectionID, connectionName, connectionType, response) {
        this._connectionID = connectionID;
        this._connectionName = connectionName;
        this._connectionType = connectionType;
        this._response = response;
    }
    // Legacy accessor names (kept for backward compatibility).
    get itemCode() { return this._connectionID; }
    set itemCode(value) { this._connectionID = value; }
    get itemName() { return this._connectionName; }
    set itemName(value) { this._connectionName = value; }
    get catalogCategory() { return this._connectionType; }
    set catalogCategory(value) { this._connectionType = value; }
    get response() { return this._response; }
    set response(value) { this._response = value; }
    // Properly named aliases.
    get connectionID() { return this._connectionID; }
    set connectionID(value) { this._connectionID = value; }
    get connectionName() { return this._connectionName; }
    set connectionName(value) { this._connectionName = value; }
    get connectionType() { return this._connectionType; }
    set connectionType(value) { this._connectionType = value; }
}
module.exports = Userconnection;
<file_sep>const connectionmodel = require('../model/connection');
const express = require('express');
const connectionDB = require('../utility/connectionDB');
const router = express.Router();
var User = require('../model/User');
var UserProfile = require('../model/Userprofile');
var Userconnection = require('../model/Userconnection');
var ProfileController = require('./profileController');
//SESSION data
// Render the landing (index) page.
home = function(req,res)
{
    res.render('index');
};
// Render the connections listing page with the distinct connection types and
// every known connection. (Dropped the console.log of the full connection
// list that previously ran on every request; added the missing semicolons.)
connections = function(req,res)
{
    var connectionTypes = getconnectionType();
    var connections = connectionDB.getConnections();
    var data = {
        'types': connectionTypes,
        'connections': connections
    };
    res.render('connections', {'data': data});
};
// Render the form page for creating a new connection.
connection = function(req,res)
{
    res.render('newconnection');
}
// Render the detail page for one connection; redirect home when the id does
// not resolve to a record.
// Fix: the original fell through to res.render after res.redirect, which
// attempts to send a second response for the same request; the early return
// prevents that. The per-request console.log debug output is removed.
// NOTE(review): handlers in this file are assigned to implicit globals
// (no var/let/const) — this would throw under 'use strict'; kept as-is here
// for consistency with the sibling handlers.
individualconnection = function(req,res)
{
    let ID = req.params.connectionID;
    var connection = connectionDB.getConnection(ID);
    if (connection == undefined)
    {
        res.redirect('/');
        return;
    }
    res.render('connection', {data: connection});
}
// Render the current user's "my connections" page.
userconnections = function(req,res)
{
    res.render('myconnections');
};
// Render the static "about" page.
about = function(req,res)
{
    res.render('about');
}
// Render the static "contact" page.
contact = function(req,res)
{
    res.render('contact');
}
// Collect the distinct connectionType values across all connections,
// preserving first-seen order. (Dropped the console.log that fired for every
// newly seen type.)
var getconnectionType = function()
{
    let connectionTypes = [];
    connectionDB.getConnections().forEach(function(conn)
    {
        if (!connectionTypes.includes(conn.connectionType))
        {
            connectionTypes.push(conn.connectionType);
        }
    });
    return connectionTypes;
}
// Route table: every page is a plain GET handled by the functions above.
router.get('/',home);
router.get('/connections',connections);
router.get('/connections/createconnection' ,connection);
router.get('/connections/connection/:connectionID', individualconnection);
router.get('/myconnections',userconnections);
router.get('/about' ,about);
router.get('/contact',contact);
// Catch-all: redirect anything unmatched back to the home page.
router.get('/*',function(req,res)
{
    res.redirect('/');
});
module.exports = router;
// Function to RENDER INDEXPAGE
<file_sep>const express = require('express');
const controller = require('../controller/connectionsController');
const router = express.Router();
// Second route table, wired against the controller module.
// NOTE(review): ../controller/connectionsController assigns
// `module.exports = router` (a Router instance), so controller.home,
// controller.connections, etc. are undefined here — registering undefined
// handlers would throw at load time. Verify which of the two route files the
// app actually mounts and remove the other.
router.get('/',controller.home);
router.get('/connections',controller.connections);
router.get('/connections/createconnection' ,controller.connection);
router.get('/connections/connection/:connectionID', controller.individualconnection);
router.get('/myconnections',controller.userconnections);
router.get('/about' ,controller.about);
router.get('/contact',controller.contact);
module.exports = router;
<file_sep>var User = require('../model/User');
var Userconnection = require('../model/userconnection');
var Connection = require('../model/connection');
var Userprofile = require('../model/Userprofile');
var userData = [
{
userId: 1,
firstName: "<NAME>",
lastName: "kalidindi",
email: "<EMAIL>",
address1: "9539 University Terrace Dr",
address2: "Apt E",
city: "Charlotte",
state: "North Carolina",
zipCode: 28262,
country: "United States"
},
{
userId: 2,
firstName: "Sneha",
lastName: "lella",
email: "<EMAIL>",
address1: "216 Barton creek Dr",
address2: "Apt E",
city: "Charlotte",
state: "North Carolina",
zipCode: 28262,
country: "United States"
}
];
var userProfileData = [
{
userId: 1,
userconnectionlist: [
{
'connectionID': 1,
'connectionName':'Dota all day',
'connectionType':'esports',
'response': 'yes',
},
{
'connectionID': 6,
'connectionName':'Let\'s Raquet ball',
'connectionType':'sports',
'response': 'no',
}
]
},
{
userId: 2,
userconnectionlist: [
{
'connectionID': 4,
'connectionName':'Football addicts',
'connectionType':'sports',
'response': 'yes',
},
{
'connectionID': 5,
'connectionName':'Tennis tricks and tips',
'connectionType':'sports',
'response': 'no',
}
]
}
];
// Build a User model instance for every hard-coded user record.
module.exports.getUsers = function () {
    return userData.map(function (record) {
        return new User(record.userId,
            record.firstName,
            record.lastName,
            record.email,
            record.address1,
            record.address2,
            record.city,
            record.state,
            record.zipCode,
            record.country);
    });
};
// Build a Userprofile (populated with its Userconnection entries) for every
// hard-coded profile record.
module.exports.getUserProfiles = function() {
    var userProfiles = [];
    userProfileData.forEach(function (record) {
        var profile = new Userprofile(record.userId);
        record.userconnectionlist.forEach(function (entry) {
            profile.addConnection(new Userconnection(
                entry.connectionID,
                entry.connectionName,
                entry.connectionType,
                entry.response));
        });
        userProfiles.push(profile);
    });
    return userProfiles;
};
// Find the user whose id loosely matches userId (route parameters arrive as
// strings, hence ==) and wrap it in a User model.
// Returns undefined when no record matches.
module.exports.getUser = function (userId) {
    for (var i = 0; i < userData.length; i++) {
        var record = userData[i];
        if (parseInt(record.userId) != userId) {
            continue;
        }
        return new User(record.userId,
            record.firstName,
            record.lastName,
            record.email,
            record.address1,
            record.address2,
            record.city,
            record.state,
            record.zipCode,
            record.country);
    }
};
// Fetch one user's profile (with its Userconnection entries) by id.
// Fix: this lookup used strict === between the parsed number and userId,
// while every sibling lookup in this module (getUser, and connectionDB's
// getConnection) uses loose ==; with a string route parameter the strict
// compare never matched and the function silently returned undefined. The
// commented-out typeof debug lines from that investigation are removed.
// Returns undefined when no record matches.
module.exports.getUserProfile = function (userId) {
    for (var i = 0; i < userProfileData.length; i++) {
        if (parseInt(userProfileData[i].userId) == userId) {
            var userProfile = new Userprofile(userProfileData[i].userId);
            for (var j = 0; j < userProfileData[i].userconnectionlist.length; j++) {
                var entry = userProfileData[i].userconnectionlist[j];
                userProfile.addConnection(new Userconnection(
                    entry.connectionID,
                    entry.connectionName,
                    entry.connectionType,
                    entry.response));
            }
            return userProfile;
        }
    }
};
| 6cd9318ffd1fd4cd91100f772ef65f7d8535e22a | [
"Markdown",
"JavaScript"
] | 7 | Markdown | spyla001/EXPLORERS-SYNERGY- | d69294baa089c30d782e0a9b9027eb0f37c098ea | 8759818a6bc6fa7b49cf1d27c0813823887f9d48 |
refs/heads/master | <file_sep>import speech_recognition as sr
from howToSpell import recognize_speech_from_mic as rs
from howToSpell import spellWord
from voiceSearch import search
from game import game
from os import system
import sys
def getCommand():
print("Say a command")
system("say " + "Start")
said = rs(recognizer, microphone)
while not said in ["search", "play", "spell"]:
if said["transcription"]:
break
if not said["success"]:
break
print("I didn't catch that. What did you say?\n")
system("say " + "I didn't catch that. What did you say?")
getCommand()
query = said["transcription"].lower()
split = query.split(" ")
if split[0] == "search":
search(query.partition("search")[2])
elif split[0] == "play":
game(recognizer,microphone)
elif split[0] == "spell":
spellWord(query.partition("spell ")[2])
elif split[0] == "exit":
sys.exit()
if __name__ == "__main__":
recognizer = sr.Recognizer()
microphone = sr.Microphone()
print("Working...")
while True:
getCommand()<file_sep>import wikipedia
import os
import speech_recognition as sr
from howToSpell import recognize_speech_from_mic as rs
#import pyttsx
from gtts import gTTS
def censor_string(txt, lst, char):
return " ".join(char*len(word) if word in lst else word for word in txt.split() )
if __name__ == "__main__":
#engine = pyttsx.init()
recognizer = sr.Recognizer()
microphone = sr.Microphone()
print("Working...")
print("Say what you want to search")
said = rs(recognizer,microphone)
if said["transcription"]:
query = said["transcription"]
print("Searching: " + said["transcription"])
summary = wikipedia.summary(query,sentences=2)
myobj = gTTS(text=summary, lang='en', slow=False)
myobj.save("welcome.mp3")
os.system("afplay welcome.mp3")
#engine.say(summary)
#engine.runAndWait()
def search(query):
summary = wikipedia.summary(query,sentences=2)
myobj = gTTS(text=summary, lang='en', slow=False)
myobj.save("welcome.mp3")
os.system("afplay welcome.mp3")
<file_sep># assistant
## running
You can run each of the python files separately for each function or you can just run `assistant.py` to access all.
## about
This is a very rudimentry voice assistant using python3.
## commands
### search
saying "search foo" will search the term "foo" in wikipedia and speak out the first two sentances.
### spell
saying "spell foo" will spell out the word "foo"(f-o-o).
### play
saying play will trigger a built in game using speech recognition
| 27922a0f0ee6ec08cf4836e5eced350f85631cc8 | [
"Markdown",
"Python"
] | 3 | Python | AkshatAdsule/assistant | e5c4a1b6862ee3b8aed65e1e0fe50db076d0dd10 | 944dd390167d6e6baef08e2ad8a0c8a8287bbce4 |
refs/heads/master | <repo_name>GunshipPenguin/term3-ids<file_sep>/src/TileDrawable.cpp
#include "TileDrawable.h"
// Store the tile set this drawable renders from (the raw pointer is kept
// as-is; the TileSet must outlive this object).
void TileDrawable::setTileSet(TileSet* tileSet) {
  tileSet_ = tileSet;
}
<file_sep>/src/CreepPathfinder.h
#ifndef PATH_H
#define PATH_H
#include <iostream>
#include <vector>
#include "Tile.h"
#include "TileMap.h"
// Computes, for every tile on the map, which tile a creep should step to
// next to reach the creep exit (breadth-first search outward from the exit).
class CreepPathfinder {
public:
  // Rebuild the path graph; call whenever the tile map changes.
  void updatePaths();
  // Id of the next tile on the path from the given tile id toward the exit.
  int getNextByID(int);
  // Set the map to path over; the pointed-to TileMap must outlive this object.
  void setTileMap(const TileMap* tileMap);
  // Width of the underlying tile map, in tiles.
  int getNumTilesX();
private:
  // Row / column of a tile id within a row-major grid of the given width.
  int getYById(int,int);
  int getXById(int,int);
  // Tile id of the creep exit, located during updatePaths().
  int exitTile_;
  // Borrowed pointer to the current map (not owned).
  const TileMap* tiles_;
  // directedPathGraph_[i] is the tile to move to from tile i
  // (-1 when unreachable; the exit maps to itself).
  std::vector<int> directedPathGraph_;
};
#endif
<file_sep>/src/Tower.cpp
/*
* Tower.cpp
*
* Created on: Jun 15, 2016
* Author: rhys
*/
#include "Tower.h"
// Towers carry no state requiring explicit set-up or tear-down yet, so both
// special members are intentionally empty.
Tower::Tower() {
}

Tower::~Tower() {
}
<file_sep>/src/MenuScreen.cpp
#include "MenuScreen.h"
#include <string>
#include <iostream>
#include <algorithm>
// Menu screen loop: the user types a map path; Enter submits it.
// Returns LOAD_MAP once a path is submitted, FONT_LOAD_FAILED when the UI
// font cannot be loaded, or LOGIC_ERROR if the window closes first.
int MenuScreen::run(sf::RenderWindow& window) {
  std::string currMapPath;
  sf::Vector2u windowSize = window.getSize();
  sf::Font font;
  sf::Text text;
  if (!font.loadFromFile("../res/open-sans.semibold.ttf")) {
    return FONT_LOAD_FAILED;
  }
  text.setFont(font);
  text.setCharacterSize(std::max(static_cast<int>(windowSize.x / 50.0), 10));
  sf::Event event;
  while (window.isOpen()) {
    while (window.pollEvent(event)) {
      switch (event.type) {
        case sf::Event::TextEntered:
          // Only ASCII input is handled. Backspace (8) deletes; other
          // control characters (e.g. the '\r' that accompanies Return)
          // are ignored so they never end up in the path.
          if (event.text.unicode < 128) {
            if (event.text.unicode == 8 and currMapPath.size() > 0) {
              currMapPath.erase(currMapPath.size() - 1);
            } else if (event.text.unicode >= 32) {
              currMapPath += static_cast<char>(event.text.unicode);
            }
          }
          // This break was missing: control fell through into KeyPressed
          // and read event.key on a TextEntered event (wrong union member).
          break;
        case sf::Event::KeyPressed:
          // Submit the typed path when Enter is pressed.
          if (event.key.code == sf::Keyboard::Return) {
            // NOTE(review): mapPath_ stores a pointer into the local
            // string currMapPath, which is destroyed on return, so
            // getMapPath() will dangle. mapPath_ should own a copy
            // (e.g. be a std::string); fixing that requires the header.
            mapPath_ = currMapPath.c_str();
            return LOAD_MAP;
          }
          break;
        default:
          break;
      }
    }
    // Redraw once per frame. Previously this ran inside the event loop, so
    // the screen was only refreshed while input events were arriving.
    text.setString(currMapPath);
    text.setPosition(0, windowSize.y / 2.0);
    window.clear(sf::Color::Black);
    window.draw(text);
    window.display();
  }
  return LOGIC_ERROR;
}
// Return the map path entered on the menu screen.
// NOTE(review): mapPath_ is assigned from currMapPath.c_str() inside run(),
// where currMapPath is a function-local std::string — by the time a caller
// reads this pointer the underlying buffer has been destroyed. Verify and
// change mapPath_ to own its storage (std::string).
const char* MenuScreen::getMapPath() {
  return mapPath_;
}
<file_sep>/src/Screen.h
#include<SFML/Graphics.hpp>
#ifndef SCREEN_H
#define SCREEN_H
// Abstract interface for a top-level game screen (menu, gameplay, ...).
class Screen {
public:
  // Drive the screen's loop against the given window; the return value is a
  // screen-specific status / transition code.
  virtual int run(sf::RenderWindow &window) = 0;
  // Virtual destructor added: Screen is used polymorphically (run is pure
  // virtual), so deleting a derived screen through a Screen* must invoke the
  // derived destructor.
  virtual ~Screen() {}
};
#endif
<file_sep>/src/CreepPathfinder.cpp
#include <vector>
#include <queue>
#include <iostream>
#include "CreepPathfinder.h"
#include "Tile.h"
// Breadth-first search outward from the creep exit. Afterwards,
// directedPathGraph_[i] holds the next tile on a shortest path from tile i
// to the exit; the exit maps to itself and unreachable tiles stay -1.
void CreepPathfinder::updatePaths() {
  std::queue<int> q;
  int currTile;
  int numTilesX = tiles_->getNumTilesX();
  int numTilesY = tiles_->getNumTilesY();
  std::vector<Tile> tileMap = tiles_->getTiles();
  // positions[i] == -1 marks "not yet visited".
  std::vector<int> positions(numTilesX*numTilesY,-1);
  // Locate the exit tile (last one wins if several are flagged).
  // NOTE(review): if no tile is flagged as the exit, exitTile_ keeps its
  // previous (possibly uninitialized) value — verify maps always have one.
  for (size_t i = 0; i < tileMap.size(); ++i) {
    if (tileMap.at(i).isCreepExit()) {
      exitTile_ = i;
    }
  }
  q.push(exitTile_);
  positions[exitTile_] = exitTile_;
  while (!q.empty()) {
    currTile = q.front();
    q.pop();
    // Only expand through tiles creeps can actually traverse.
    if (!tileMap.at(currTile).isBuilt() and tileMap.at(currTile).isCreepWalkable()) {
      // North neighbour (one row up).
      if (getYById(currTile,numTilesX) != 0 and positions.at(currTile-numTilesX) == -1) {
        q.push(currTile-numTilesX);
        positions[currTile-numTilesX] = currTile;
      }
      // West neighbour.
      if (getXById(currTile,numTilesX)!= 0 and positions.at(currTile-1) == -1) {
        q.push(currTile-1);
        positions[currTile-1] = currTile;
      }
      // South neighbour (one row down).
      if (getYById(currTile,numTilesX) < (numTilesY-1) and positions.at(currTile+numTilesX) == -1) {
        q.push(currTile + numTilesX);
        positions[currTile+numTilesX] = currTile;
      }
      // East neighbour.
      if (getXById(currTile,numTilesX) != (numTilesX-1) and positions.at(currTile+1) == -1) {
        q.push(currTile + 1);
        positions[currTile+1] = currTile;
      }
    }
  }
  directedPathGraph_ = positions;
}
// Point the pathfinder at the map it should search over; the map is
// borrowed, not owned.
void CreepPathfinder::setTileMap(const TileMap* tileMap) {
  tiles_ = tileMap;
}

// Width of the current map, in tiles.
int CreepPathfinder::getNumTilesX() {
  return tiles_->getNumTilesX();
}

// Next tile id on the path from the given tile toward the exit
// (bounds-checked; throws std::out_of_range for an invalid id).
int CreepPathfinder::getNextByID(int ID) {
  return directedPathGraph_.at(ID);
}
// Column of a tile id within a row-major grid numTilesX wide.
int CreepPathfinder::getXById(int Id, int numTilesX) {
  return Id % numTilesX;
}

// Row of a tile id within a row-major grid numTilesX wide.
int CreepPathfinder::getYById(int Id, int numTilesX) {
  return Id / numTilesX;
}
<file_sep>/src/GameScreen.cpp
#include "GameScreen.h"
#include "Logger.h"
#include "Tile.h"
#include "TileMap.h"
#include "TileSet.h"
#include "TileDrawable.h"
#include "Wave.h"
#include "Creep.h"
#include "CreepPathfinder.h"
#include <string>
#include <map>
#include <sstream>
#include <iostream>
#include <vector>
#include <tinyxml2.h>
#include <SFML/Graphics.hpp>
// Main gameplay loop: load the map, creeps and waves, then alternate between
// event handling, simulation updates and rendering until the window closes
// or every wave is defeated. Returns 0 on normal exit, otherwise one of the
// *_LOAD_ERROR codes.
int GameScreen::run(sf::RenderWindow &window) {
  if (! loadTileMap()) {
    Logger::log("GameScreen could not load tiles");
    return TILEMAP_LOAD_ERROR;
  }
  // Set up CreepPathfinder object (shared by all creeps via the static setter).
  CreepPathfinder creepPathfinder;
  creepPathfinder.setTileMap(&tileMap_);
  creepPathfinder.updatePaths();
  Creep::setCreepPathfinder(creepPathfinder);
  // Load Creeps
  if (! loadCreeps()) {
    Logger::log("GameScreen could not load creeps");
    return CREEP_LOAD_ERROR;
  }
  // Load Waves
  if (! loadWaves()) {
    Logger::log("Waves could not be loaded");
    return WAVE_LOAD_ERROR;
  }
  // Set up map and window views
  sf::View mapView = getMapView(window.getSize().x, window.getSize().y);
  sf::View menuView = getMenuView(window.getSize().x, window.getSize().y);
  // Event object to poll for
  sf::Event event;
  // Menu shape to fill menuView
  sf::RectangleShape menuShape(menuView.getSize());
  menuShape.setFillColor(sf::Color::Blue);
  // Main loop
  while (true) {
    // Update time delta and wave
    Updateable::updateDelta();
    if (waveOngoing_)
      waves_.front().update();
    // Deduct lives for creeps that reached the exit.
    // NOTE(review): this reads waves_.front() even while no wave is ongoing;
    // it relies on waves_ never being empty here (guaranteed below, because
    // the loop returns as soon as waves_ empties).
    lives_ -= waves_.front().getCreepsLeakedSinceLastUpdate();
    // Check to see if the current wave is defeated
    if (waveOngoing_ && waves_.front().isDefeated()) {
      waveOngoing_ = false;
      waves_.pop();
      if (waves_.empty()) {
        Logger::log("All waves defeated");
        return 0;
      }
    }
    // Handle events: close, resize (rebuild both views), space starts a wave.
    while (window.pollEvent(event)) {
      switch (event.type) {
      case sf::Event::Closed:
        window.close();
        return 0;
      case sf::Event::Resized:
        mapView = getMapView(window.getSize().x, window.getSize().y);
        menuView = getMenuView(window.getSize().x, window.getSize().y);
        menuShape.setSize(menuView.getSize());
        break;
      case sf::Event::KeyPressed:
        if (event.key.code == sf::Keyboard::Space)
          waveOngoing_ = true;
        break;
      default:
        break;
      }
    }
    // Clear screen before drawing anything
    window.clear(sf::Color::Black);
    // Draw map (and the active wave's creeps) in the map viewport.
    window.setView(mapView);
    tileMap_.draw(window);
    if (waveOngoing_)
      waves_.front().draw(window);
    // Draw menu panel in the right-hand viewport.
    window.setView(menuView);
    menuShape.setPosition(0, 0);
    window.draw(menuShape);
    window.display();
  }
  return 0;
}
// Build the right-hand menu view: MENU_SIZE fraction of the window width,
// full height, anchored to the right edge of the window.
sf::View GameScreen::getMenuView(int screenWidth, int screenHeight) {
  sf::FloatRect menuArea(0, 0, screenWidth*MENU_SIZE, screenHeight);
  sf::View menuView(menuArea);
  sf::FloatRect viewport(1-MENU_SIZE, 0, MENU_SIZE, 1);
  menuView.setViewport(viewport);
  return menuView;
}
// Build the map view: a view sized to cover every tile, letter-boxed into
// the portion of the window left of the menu while keeping the map's aspect
// ratio (spare space is split evenly on either side).
sf::View GameScreen::getMapView(int screenWidth, int screenHeight) {
  // Fit the mapView around all the tiles
  sf::View mapView(sf::FloatRect(0, 0,
    tileMap_.getNumTilesX()*TileSet::getDrawnSize(),
    tileMap_.getNumTilesY()*TileSet::getDrawnSize()));
  // Horizontal pixels available once the menu strip is reserved.
  int xAvaliable = screenWidth * (1-MENU_SIZE);
  float windowRatio = xAvaliable / (float) screenHeight;
  float viewRatio = mapView.getSize().x / (float) mapView.getSize().y;
  // Viewport position/size, in window-fraction units.
  float sizeX = 1;
  float sizeY = 1;
  float posX = 0;
  float posY = 0;
  // true: pad left/right; false: pad top/bottom.
  bool horizontalSpacing = true;
  if (windowRatio < viewRatio)
    horizontalSpacing = false;
  if (horizontalSpacing) {
    posY = 0;
    sizeY = 1;
    sizeX = (sizeY*screenHeight*viewRatio)/screenWidth;
    posX = (1-MENU_SIZE-sizeX)/2;
  } else {
    posX = 0;
    sizeX = (1-MENU_SIZE);
    // NOTE(review): multiplying by viewRatio here (rather than dividing)
    // looks inverted relative to the branch above — verify against a tall
    // map on a wide window.
    sizeY = (sizeX*screenWidth*viewRatio)/screenHeight;
    posY= (1-sizeY) / 2;
  }
  mapView.setViewport(sf::FloatRect(posX, posY, sizeX, sizeY));
  return mapView;
}
// Parse <mapPath>/waves.xml and fill waves_ with one Wave per <wave>
// element. Each wave element carries an entrySpeed attribute and a
// comma-separated list of creep ids (which must already exist in
// loadedCreeps_, i.e. loadCreeps() must run first).
// Returns false (after logging) on any parse or lookup failure.
bool GameScreen::loadWaves() {
  // Load XML document
  tinyxml2::XMLDocument doc;
  std::string creepsFilePath = mapPath_ + "/waves.xml";
  if(doc.LoadFile(creepsFilePath.c_str()) != tinyxml2::XML_NO_ERROR) {
    Logger::log("Could not load waves.xml");
    return false;
  }
  // Get waves element
  tinyxml2::XMLElement* wavesElement = doc.FirstChildElement("waves");
  tinyxml2::XMLElement* currWaveElement = wavesElement->FirstChildElement();
  // Loop through and load all waves
  while(true) {
    if (currWaveElement == 0) {
      break;
    }
    // Get wave entry speed (delay between creeps entering the map).
    int entrySpeed;
    if(currWaveElement->QueryIntAttribute("entrySpeed", &entrySpeed) != tinyxml2::XML_NO_ERROR) {
      Logger::log("Could not find entry speed of wave in waves.xml");
      return false;
    }
    // Build vector of wave creeps: strip the newlines/tabs XML formatting
    // leaves in the element text, then split on commas.
    std::string creepIdString = currWaveElement->GetText();
    creepIdString.erase(std::remove(creepIdString.begin(),
      creepIdString.end(), '\n'), creepIdString.end());
    creepIdString.erase(std::remove(creepIdString.begin(),
      creepIdString.end(), '\t'), creepIdString.end());
    std::istringstream ss(creepIdString);
    std::string creepId;
    std::vector<Creep> creepVector;
    while(std::getline(ss, creepId, ',')) {
      if (loadedCreeps_.find(creepId) == loadedCreeps_.end()) {
        Logger::log("Creep with id of " + creepId + " does not exist");
        return false;
      }
      // TODO build up creepVector once Devin fixes up Creeps
      Creep currCreep = loadedCreeps_[creepId];
      creepVector.push_back(currCreep);
    }
    waves_.push(Wave(creepVector, entrySpeed));
    currWaveElement = currWaveElement->NextSiblingElement();
  }
  return true;
}
// Parse <mapPath>/creeps.xml and populate loadedCreeps_ (prototype Creep per
// id) and creepTileSets_ (that creep's texture atlas). Every creep is
// created at the map's creep-spawn tile, so loadTileMap() must run first.
// Returns false (after logging) on any parse or texture-load failure.
bool GameScreen::loadCreeps() {
  // Load XML document
  tinyxml2::XMLDocument doc;
  std::string creepsFilePath = mapPath_ + "/creeps.xml";
  if(doc.LoadFile(creepsFilePath.c_str()) != tinyxml2::XML_NO_ERROR) {
    Logger::log("Could not load creeps.xml");
    return false;
  }
  // Get creeps element
  tinyxml2::XMLElement* creepsElement = doc.FirstChildElement("creeps");
  tinyxml2::XMLElement* currCreepElement = creepsElement->FirstChildElement();
  // Find creepSpawn: the index of the tile flagged as the spawn point.
  // NOTE(review): if no tile is flagged, creepSpawn stays uninitialized and
  // is still passed to Creep below — verify maps always define a spawn.
  size_t i;
  int creepSpawn;
  for(i=0;i<tileMap_.getTiles().size();i++) {
    if(tileMap_.getTiles().at(i).isCreepSpawn()) {
      creepSpawn = static_cast<int>(i);
      break;
    }
  }
  // Loop through and load all creeps
  while(true) {
    if (currCreepElement == 0) {
      break;
    }
    // Get creep id
    const char* creepIdCharArray;
    creepIdCharArray = currCreepElement->Attribute("id");
    if (! creepIdCharArray) {
      Logger::log("Creep id does not exist in creeps.xml");
      return false;
    }
    std::string creepId(creepIdCharArray);
    if(creepId.empty()) {
      Logger::log("Empty creep id in creeps.xml");
      return false;
    }
    // Get creep speed
    int creepSpeed;
    if(currCreepElement->QueryIntAttribute("speed", &creepSpeed) != tinyxml2::XML_NO_ERROR) {
      Logger::log("Could not find speed of creep in creeps.xml");
      return false;
    }
    // Get creep hit points
    int creepHp;
    if(currCreepElement->QueryIntAttribute("hp", &creepHp) != tinyxml2::XML_NO_ERROR) {
      Logger::log("Could not find hp of creep in creeps.xml");
      return false;
    }
    // Get creep tileSize (size of one frame in the creep's texture atlas)
    int creepTileSize;
    if(currCreepElement->QueryIntAttribute("tileSize", &creepTileSize) != tinyxml2::XML_NO_ERROR) {
      Logger::log("Could not find tileSize of creep in creeps.xml");
      return false;
    }
    // Get creep texture file name
    const char* creepTextureFileNameCharArray = currCreepElement->Attribute("texture");;
    if (! creepTextureFileNameCharArray) {
      Logger::log("Creep texture file name does not exist in creeps.xml");
      return false;
    }
    std::string creepTextureFileName(creepTextureFileNameCharArray);
    // Load creep texture from the map's res/ directory.
    sf::Texture creepTexture;
    std::string textureDir = mapPath_ + "/res/" + creepTextureFileName;
    if (!creepTexture.loadFromFile(textureDir)) {
      Logger::log("Could not load creep texture in creeps.xml");
      return false;
    }
    // Store the tile set in the member map first so the Creep can keep a
    // stable pointer to the stored copy (not the local).
    TileSet creepTileSet = TileSet(creepTexture, creepTileSize);
    creepTileSets_[creepId] = creepTileSet;
    Creep creep = Creep(creepSpawn, creepHp, creepSpeed, creepId);
    creep.setTileSet(&creepTileSets_[creepId]);
    loadedCreeps_.insert(std::pair<std::string, Creep> (creepId, creep));
    currCreepElement = currCreepElement->NextSiblingElement();
  }
  return true;
}
// Parse <mapPath>/tilemap.xml and build tileMap_: grid dimensions, the tile
// set texture and size, which tile ids are creep-walkable / buildable, the
// creep spawn and exit tiles (1-based in the XML), and the per-tile id grid.
// Returns false (after logging) on most parse failures.
bool GameScreen::loadTileMap() {
  // Load XML document
  tinyxml2::XMLDocument doc;
  std::string tileMapPath = mapPath_ + "/tilemap.xml";
  if(doc.LoadFile(tileMapPath.c_str()) != tinyxml2::XML_NO_ERROR) {
    Logger::log("Could not load tilemap.xml");
    return false;
  }
  // Get tile_map element
  tinyxml2::XMLElement* tileMapElement = doc.FirstChildElement("tile_map");
  // Get width and height from the tile_map element.
  // NOTE(review): unlike the other checks, these two only log and do NOT
  // return false, so width/height would be used uninitialized on failure —
  // verify whether that is intentional.
  int width, height;
  if(tileMapElement->QueryIntAttribute("width", &width) != tinyxml2::XML_NO_ERROR){
    Logger::log("Could not find width attribute of tile_map element");
  }
  if (tileMapElement->QueryIntAttribute("height", &height)) {
    Logger::log("Could not find height attribute of tile_map element");
  }
  int numTilesX = width;
  int numTilesY = height;
  // Load the size of the tileset to be used as the drawn size for all tiles
  int tileSize;
  tinyxml2::XMLElement* tileSetElement = tileMapElement->FirstChildElement(
    "tile_set");
  if(tileSetElement->QueryIntAttribute("size", &tileSize) != tinyxml2::XML_NO_ERROR) {
    Logger::log("Could not find size element of tile_set element");
    return false;
  }
  TileSet::setDrawnSize(tileSize);
  tileSet_.setTileSize(tileSize);
  // Load TileSet texture
  const char* tileSetTexture = tileSetElement->Attribute("src");
  if (tileSetTexture == NULL) {
    Logger::log("No path to texture specified by src attribute of tile_set element");
    return false;
  }
  std::string textureDir = mapPath_ + "/res/" + tileSetElement->Attribute("src");
  if (tilesTexture_.loadFromFile(textureDir) == false) {
    Logger::log("Could not load tile_set texture specified in src attribute");
    return false;
  }
  tilesTexture_.setSmooth(false);
  tileSet_.setTexture(tilesTexture_);
  // Load creep walkable tiles.
  // NOTE(review): Attribute() returns NULL when the attribute is missing,
  // and constructing std::string from NULL is undefined behavior — verify
  // these attributes are mandatory or add NULL checks like the src one above.
  std::string creepWalkableTilesString(tileSetElement->Attribute("creep_walkable"));
  std::vector<int> creepWalkableTiles = tokenizeIntString(creepWalkableTilesString, ",");
  // Load buildable tiles
  std::string buildableTilesString(tileSetElement->Attribute("buildable"));
  std::vector<int> buildableTiles = tokenizeIntString(buildableTilesString, ",");
  // Load creep spawn tile (1-based in the XML, converted to 0-based below).
  int creepSpawnTile;
  if (tileMapElement->FirstChildElement("creep_spawn")->QueryIntAttribute("tile", &creepSpawnTile)
      != tinyxml2::XML_NO_ERROR) {
    Logger::log("Could not load info from creep_spawn attribute in tilemap.xml");
    return false;
  }
  creepSpawnTile --;
  // Load creep exit tile (also 1-based in the XML).
  int creepExitTile;
  if (tileMapElement->FirstChildElement("creep_exit")->QueryIntAttribute("tile", &creepExitTile)
      != tinyxml2::XML_NO_ERROR) {
    Logger::log("Could not load info from creep_exit attribute in tilemap.xml");
    return false;
  }
  creepExitTile --;
  // Load tile id data
  tinyxml2::XMLElement* tileDataElement = tileMapElement->FirstChildElement(
    "tile_data");
  std::string tileDataString(tileDataElement->GetText());
  // Fill up the tiles vector, flagging walkable/buildable ids as we go.
  std::vector<int> idVector = tokenizeIntString(tileDataString, "\n,");
  std::vector<Tile> tiles;
  Tile currTile;
  for(size_t i=0;i<idVector.size();i++) {
    currTile = Tile();
    currTile.setId(idVector.at(i));
    currTile.setTileSet(&tileSet_);
    if (std::find(creepWalkableTiles.begin(),
        creepWalkableTiles.end(), idVector.at(i)) != creepWalkableTiles.end()) {
      currTile.setCreepWalkable(true);
    }
    if (std::find(buildableTiles.begin(),
        buildableTiles.end(), idVector.at(i)) != buildableTiles.end()) {
      currTile.setBuildable(true);
    }
    tiles.push_back(currTile);
  }
  // Ensure that the number of tiles specified is equal to numTilesX_ * numTilesY_
  if (tiles.size() != (size_t) numTilesX * numTilesY) {
    Logger::log("Number of tiles in tilemap.xml differs "
      "from the amount specified in width and height attribute of tile_map");
    return false;
  }
  // Mark creep spawn
  tiles.at(creepSpawnTile).setCreepSpawn(true);
  // Mark creep exit
  tiles.at(creepExitTile).setCreepExit(true);
  // Create TileMap object
  tileMap_.setTiles(tiles);
  tileMap_.setNumTilesX(numTilesX);
  tileMap_.layoutTiles(TileSet::getDrawnSize());
  return true;
}
// Split str on any of the given delimiter characters and parse each token as
// a base-10 integer.
// Fix: the previous version used atoi() and skipped every token whose value
// was 0 (a hack aimed at a trailing-tab token), which also silently dropped
// genuine "0" entries. strtol's end-pointer now distinguishes "no digits at
// all" (skipped, e.g. stray whitespace runs from the XML text) from a real
// parsed zero (kept).
std::vector<int> GameScreen::tokenizeIntString(std::string str, std::string delimeters) {
  std::vector<int> tokenVector;
  std::string::size_type lastPos = str.find_first_not_of(delimeters, 0);
  std::string::size_type pos = str.find_first_of(delimeters, lastPos);
  while (std::string::npos != pos || std::string::npos != lastPos) {
    std::string currToken = str.substr(lastPos, pos - lastPos);
    // Advance to the next token before deciding whether to keep this one.
    lastPos = str.find_first_not_of(delimeters, pos);
    pos = str.find_first_of(delimeters, lastPos);
    char* end = NULL;
    long value = strtol(currToken.c_str(), &end, 10);
    if (end == currToken.c_str())
      continue; // token contains no parsable number
    tokenVector.push_back(static_cast<int>(value));
  }
  return tokenVector;
}
// Record the directory containing this map's tilemap.xml, creeps.xml,
// waves.xml and res/ assets.
void GameScreen::setMapPath(std::string mapPath) {
  mapPath_ = mapPath;
}
<file_sep>/src/Drawable.h
#ifndef DRAWABLE_H
#define DRAWABLE_H
#include <SFML/Graphics.hpp>
// Base class for anything with a 2D position that can draw itself onto a
// render window.
class Drawable {
public:
  void setPosition(sf::Vector2f);
  sf::Vector2f getPosition();
  // Draw this object onto the given window.
  virtual void draw(sf::RenderWindow&) = 0;
  // Virtual destructor added: draw() is pure virtual, so Drawable is used
  // polymorphically and deletion through a Drawable* must reach the derived
  // destructor.
  virtual ~Drawable() {}
private:
  sf::Vector2f position_;
};
#endif
<file_sep>/src/Collideable.h
#ifndef COLLIDEABLE_H
#define COLLIDEABLE_H
#include <SFML/Graphics.hpp>
// Mixin that stores an axis-aligned collision rectangle for hit testing.
class Collideable {
public:
  // Current collision bounds.
  sf::FloatRect getCollisionRect();
  void setCollisionRect(sf::FloatRect);
private:
  sf::FloatRect collisionRect_;
};
#endif
<file_sep>/src/Wave.cpp
#include "Wave.h"
// Read-only view of the creeps currently on the field.
const std::vector<Creep>& Wave::getActiveCreeps() {
  return activeCreeps_;
}
// Report how many creeps leaked (reached the exit) since the previous call,
// then reset the counter so each leak is only ever reported once.
// Fix: the original returned first and zeroed the counter afterwards — the
// reset was unreachable, so every leaked creep was charged to the caller
// again on every subsequent call.
int Wave::getCreepsLeakedSinceLastUpdate() {
  int leaked = creepsLeakedSinceLastUpdate_;
  creepsLeakedSinceLastUpdate_ = 0;
  return leaked;
}
// A wave is beaten once no creeps remain on the field or in the waiting pool.
bool Wave::isDefeated() {
  if (!activeCreeps_.empty())
    return false;
  return waitingCreeps_.empty();
}
// Move one creep from the waiting pool (taken from the back) into play;
// does nothing once the pool is exhausted.
void Wave::sendCreep() {
  if (waitingCreeps_.empty())
    return;
  activeCreeps_.push_back(waitingCreeps_.back());
  waitingCreeps_.pop_back();
}
// Advance the wave by one frame: update every active creep, remove (and
// count) creeps that leaked off the map, and release the next waiting creep
// once entrySpeed_ time has elapsed since the last release.
void Wave::update() {
  // Call update on all creeps, remove leaked creeps
  size_t i;
  for(i=0;i<activeCreeps_.size();i++) {
    activeCreeps_.at(i).update();
  }
  // erase() returns the iterator past the removed element, so the loop
  // stays valid while erasing mid-iteration.
  std::vector<Creep>::iterator it = activeCreeps_.begin();
  while(it != activeCreeps_.end()) {
    if( it->isLeaked() ) {
      it = activeCreeps_.erase(it);
      creepsLeakedSinceLastUpdate_ ++;
    } else
      ++it;
  }
  // Send new creeps once enough time has accumulated.
  timeSinceLastCreep_ += getDelta();
  if (timeSinceLastCreep_ >= entrySpeed_ && !waitingCreeps_.empty()) {
    timeSinceLastCreep_ = 0;
    sendCreep();
  }
  return;
}
void Wave::draw(sf::RenderWindow &window) {
for(size_t i=0;i<activeCreeps_.size();i++) {
activeCreeps_.at(i).draw(window);
}
return;
}
<file_sep>/src/ResourceManager.h
#ifndef RESOURCEMANAGER_H
#define RESOURCEMANAGER_H
#include <string>
class ResourceManager {
public:
static void setBinPath(std::string);
std::string getResourcePath();
private:
std::string goUpNDirs(std::string, int);
static std::string binPath_;
};
#endif
<file_sep>/src/TileSet.h
#ifndef TILESET_H
#define TILESET_H
#include <SFML/Graphics.hpp>
class TileSet {
public:
TileSet() :
texture_(sf::Texture()),
tileSize_(16) {};
TileSet(sf::Texture texture, int tileSize) :
texture_(texture),
tileSize_(tileSize) {};
sf::Sprite getSpriteById(int);
void setTileSize(int);
void setTexture(sf::Texture&);
static void setDrawnSize(int);
static int getDrawnSize();
private:
sf::Texture texture_;
int tileSize_;
static int drawnSize_;
};
#endif
<file_sep>/src/Tile.h
#ifndef TILE_H
#define TILE_H
#include <SFML/Graphics.hpp>
#include "TileDrawable.h"
#include "TileSet.h"
class Tile : public TileDrawable {
public:
void setId(int);
void setCreepExit(bool);
void setCreepSpawn(bool);
void setCreepWalkable(bool);
void setBuilt(bool);
void setBuildable(bool);
void setSprite(sf::Sprite);
int getId() const;
bool isCreepExit() const ;
bool isCreepSpawn() const;
bool isCreepWalkable() const;
bool isBuildable() const;
bool isBuilt() const;
void draw(sf::RenderWindow&);
private:
int id_;
bool creepSpawn_;
bool creepExit_;
bool buildable_;
bool creepWalkable_;
bool built_;
};
#endif
<file_sep>/src/Tower.h
#ifndef TOWER_H
#define TOWER_H
class Tower : public TileDrawable, public Updateable {
public:
Tower();
void update();
};
#endif
<file_sep>/src/Tile.cpp
#include "Tile.h"
#include <SFML/Graphics.hpp>
#include <iostream>
#include "TileDrawable.h"
void Tile::setId(int id) {
id_ = id;
return;
}
void Tile::draw(sf::RenderWindow &window) {
if(tileSet_) {
sf::Sprite sprite = tileSet_->getSpriteById(id_);
sprite.setPosition(getPosition());
window.draw(sprite);
}
return;
}
int Tile::getId() const {
return id_;
}
void Tile::setCreepExit(bool creepExit) {
creepExit_ = creepExit;
}
void Tile::setCreepSpawn(bool spawn) {
creepSpawn_ = spawn;
}
void Tile::setCreepWalkable(bool creepWalkable) {
creepWalkable_ = creepWalkable;
}
void Tile::setBuilt(bool built) {
built_ = built;
}
void Tile::setBuildable(bool buildable) {
buildable_ = buildable;
}
bool Tile::isCreepExit() const {
return creepExit_;
}
bool Tile::isCreepSpawn() const {
return creepSpawn_;
}
bool Tile::isCreepWalkable() const {
return creepWalkable_;
}
bool Tile::isBuilt() const {
return built_;
}
bool Tile::isBuildable() const {
return buildable_;
}
<file_sep>/src/Creep.h
#ifndef CREEP_H
#define CREEP_H
#include "Updateable.h"
#include "Tile.h"
#include <vector>
#include <cmath>
#include <string>
#include <SFML/Graphics.hpp>
#include "CreepPathfinder.h"
#include "TileDrawable.h"
class Creep: public Updateable, public TileDrawable {
public:
Creep();
Creep(int,int,double,std::string);
sf::Vector2f getFuturePosition(double timeDelta);
float getXPosition();
float getYPosition();
bool isLeaked();
void update();
std::string getId();
void draw(sf::RenderWindow&);
static void setCreepPathfinder(CreepPathfinder&);
private:
bool isAbove(int,int);
bool isBelow(int,int);
bool isLeft(int,int);
bool isRight(int,int);
static CreepPathfinder paths_;
int comingFrom_;
int hp_;
double speed_;//represented as a fraction of a tile per second
std::string id_;
bool leaked_;
};
#endif
<file_sep>/src/GameScreen.h
#ifndef GAMESCREEN_H
#define GAMESCREEN_H
#include "Screen.h"
#include "Tile.h"
#include "Creep.h"
#include "TileMap.h"
#include "Wave.h"
#include <map>
#include <vector>
#include <queue>
#include <string>
#include <SFML/Graphics.hpp>
class GameScreen: public Screen {
public:
GameScreen() :
waveOngoing_(false),
lives_(20) {};
virtual int run(sf::RenderWindow&);
void setMapPath(std::string);
std::vector<Tile> getTileMap();
int getNumTilesX();
int getNumTilesY();
int getTileSize();
static const float MENU_SIZE = 0.2;
static const int TILEMAP_LOAD_ERROR = 1;
static const int CREEP_LOAD_ERROR = 2;
static const int WAVE_LOAD_ERROR = 3;
private:
bool loadCreeps();
bool loadTileMap();
bool loadWaves();
void nextWave();
sf::View getMenuView(int, int);
sf::View getMapView(int, int);
std::vector<int> tokenizeIntString(std::string, std::string);
std::string mapPath_;
sf::Texture tilesTexture_;
TileMap tileMap_;
TileSet tileSet_;
bool waveOngoing_;
std::queue<Wave> waves_;
std::map<std::string, Creep> loadedCreeps_;
std::map<std::string, TileSet> creepTileSets_;
int lives_;
};
#endif
<file_sep>/src/Logger.cpp
#include "Logger.h"
#include <string>
#include <iostream>
void Logger::log(std::string logMsg) {
std::cout << logMsg << std::endl;
}
<file_sep>/src/Creep.cpp
#include "Updateable.h"
#include "Tile.h"
#include "Creep.h"
#include <vector>
#include <cmath>
#include <SFML/Graphics.hpp>
#include "CreepPathfinder.h"
#include "TileDrawable.h"
CreepPathfinder Creep::paths_;
Creep::Creep() {}
Creep::Creep(int spawn, int hp, double speed, std::string id) {//tilesize ->drawnTileSize
int xPos = (spawn%paths_.getNumTilesX())*TileSet::getDrawnSize();
int yPos = (spawn/paths_.getNumTilesX())*TileSet::getDrawnSize();
setPosition(sf::Vector2f(xPos, yPos));
speed_ = speed;
hp_ = hp;
id_ = id;
comingFrom_ = spawn;
leaked_ = false;
}
std::string Creep::getId() {
return id_;
}
bool Creep::isLeaked() {
return leaked_;
}
bool Creep::isAbove(int first, int second) {
if (first == second - paths_.getNumTilesX())
return true;
return false;
}
bool Creep::isBelow(int first, int second) {
if (first == second + paths_.getNumTilesX())
return true;
return false;
}
bool Creep::isLeft(int first, int second) {
if (first == second - 1)
return true;
return false;
}
bool Creep::isRight(int first, int second) {
if (first == second + 1)
return true;
return false;
}
sf::Vector2f Creep::getFuturePosition(double timeDelta) {
int drawnSize = TileSet::getDrawnSize();
sf::Vector2f pos = getPosition();
float distanceDelta(timeDelta*speed_*drawnSize);
while (distanceDelta > 0) {
int goingTo = paths_.getNextByID(comingFrom_);
if (isLeft(comingFrom_,goingTo)) { //coming from left of goingTo
float distanceIntoTile = pos.x - (comingFrom_%paths_.getNumTilesX())*drawnSize;
if (distanceDelta >= drawnSize - distanceIntoTile) {
distanceDelta -= (drawnSize-distanceIntoTile);
pos.x = (goingTo%paths_.getNumTilesX())*drawnSize;
comingFrom_ = goingTo;
}else {
pos.x += distanceDelta;
distanceDelta = 0;
}
}else if (isRight(comingFrom_,goingTo)) { //coming from right of goingTo
float distanceIntoTile = (comingFrom_%paths_.getNumTilesX())*drawnSize - pos.x;
if (distanceDelta >= drawnSize - distanceIntoTile) {
distanceDelta -= (drawnSize - distanceIntoTile);
pos.x = (goingTo%paths_.getNumTilesX())*drawnSize;
comingFrom_ = goingTo;
}else {
pos.x -= distanceDelta;
distanceDelta = 0;
}
}else if (isAbove(comingFrom_,goingTo)) {//coming from above goingTo
float distanceIntoTile = pos.y - comingFrom_/paths_.getNumTilesX() * drawnSize;
if (distanceDelta >= drawnSize - distanceIntoTile) {
distanceDelta -= (drawnSize - distanceIntoTile);
pos.y = (goingTo/paths_.getNumTilesX()) * drawnSize;
comingFrom_ = goingTo;
}else {
pos.y += distanceDelta;
distanceDelta = 0;
}
}else if (isBelow(comingFrom_,goingTo)) {//coming from below goingTo
float distanceIntoTile = comingFrom_/paths_.getNumTilesX() * drawnSize - pos.y;
if (distanceDelta >= drawnSize - distanceIntoTile) {
distanceDelta -= (drawnSize - distanceIntoTile);
pos.y = (goingTo/paths_.getNumTilesX()) * drawnSize;
comingFrom_ = goingTo;
}else {
pos.y -= distanceDelta;
distanceDelta = 0;
}
}else if (comingFrom_ == goingTo) {
distanceDelta = 0;
leaked_ = true;
}
}
return pos;
}
void Creep::draw(sf::RenderWindow &window) {
if (tileSet_) {
sf::Sprite creepSprite = tileSet_->getSpriteById(1);
sf::Vector2f position = getPosition();
int drawnSize = TileSet::getDrawnSize();
creepSprite.setOrigin(drawnSize/2.f,drawnSize/2.f);
position.x += drawnSize/2.f;
position.y += drawnSize/2.f;
creepSprite.setPosition(position);
int goingTo = paths_.getNextByID(comingFrom_);
if (isAbove(comingFrom_,goingTo)) {
creepSprite.setRotation(180);
}else if (isBelow(comingFrom_,goingTo)) {
creepSprite.setRotation(0);
}else if (isLeft(comingFrom_,goingTo)) {
creepSprite.setRotation(90);
}else if (isRight(comingFrom_,goingTo)) {
creepSprite.setRotation(270);
}
window.draw(creepSprite);
}
return;
}
void Creep::update() {
setPosition(getFuturePosition(getDelta()));
return;
}
void Creep::setCreepPathfinder(CreepPathfinder& path) {
paths_ = path;
}
<file_sep>/src/TileSet.cpp
#include "TileSet.h"
int TileSet::drawnSize_;
sf::Sprite TileSet::getSpriteById(int id) {
sf::IntRect subRect;
subRect.left = ((id-1)%(texture_.getSize().x / tileSize_))*tileSize_;
subRect.top = ((id-1)/(texture_.getSize().x / tileSize_))*tileSize_;
subRect.width = tileSize_;
subRect.height = tileSize_;
sf::Sprite tileSprite(texture_, subRect);
float scale = drawnSize_ / tileSprite.getLocalBounds().width;
tileSprite.setScale(scale, scale);
return tileSprite;
}
void TileSet::setTexture(sf::Texture& texture) {
texture_ = texture;
return;
}
void TileSet::setTileSize(int tileSize) {
tileSize_ = tileSize;
return;
}
void TileSet::setDrawnSize(int drawnSize) {
drawnSize_ = drawnSize;
return;
}
int TileSet::getDrawnSize() {
return drawnSize_;
}
<file_sep>/src/main.cpp
#include <SFML/Graphics.hpp>
#include <vector>
#include <string>
#include "MenuScreen.h"
#include "GameScreen.h"
#include "Logger.h"
#include "ResourceManager.h"
#include <iostream>
int main(int argc, char* argv[]) {
int width = 300;
int height = 300;
ResourceManager::setBinPath(std::string(argv[0]));
sf::RenderWindow window(sf::VideoMode(width, height), "term3-ids");
window.setFramerateLimit(30);
//window.setVerticalSyncEnabled(true);
if (argc != 2) {
Logger::log("Map path not specified");
return 1;
}
GameScreen gameScreen;
std::string mapPath(argv[1]);
std::cout << mapPath << std::endl;
gameScreen.setMapPath(mapPath);
gameScreen.run(window);
return 0;
}
<file_sep>/src/Wave.h
#ifndef WAVE_H
#define WAVE_H
#include <vector>
#include "Updateable.h"
#include "Creep.h"
#include "TileDrawable.h"
class Wave: public Updateable, public Drawable {
public:
Wave() :
waitingCreeps_(std::vector<Creep>()),
entrySpeed_(1),
timeSinceLastCreep_(1),
creepsLeakedSinceLastUpdate_(0) {};
Wave(std::vector<Creep> creeps, double entrySpeed) :
waitingCreeps_(creeps),
entrySpeed_(entrySpeed),
timeSinceLastCreep_(0),
creepsLeakedSinceLastUpdate_(0) {};
const std::vector<Creep>& getActiveCreeps();
void update();
void draw(sf::RenderWindow&);
int getCreepsLeakedSinceLastUpdate();
bool isDefeated();
private:
void sendCreep();
std::vector<Creep> waitingCreeps_;
std::vector<Creep> activeCreeps_;
double entrySpeed_;
double timeSinceLastCreep_;
int creepsLeakedSinceLastUpdate_;
};
#endif
<file_sep>/src/TileMap.cpp
#include "TileMap.h"
#include <vector>
void TileMap::draw(sf::RenderWindow& window) {
size_t i;
for (i=0;i<tiles_.size();i++) {
tiles_.at(i).draw(window);
}
return;
}
void TileMap::setTiles(std::vector<Tile>& tiles) {
tiles_ = tiles;
return;
}
void TileMap::setNumTilesX(int numTilesX) {
numTilesX_ = numTilesX;
return;
}
void TileMap::layoutTiles(int tileSize) {
// Set tile positions
int x, y;
for (size_t i=0;i<tiles_.size();i++) {
x = (i % numTilesX_) * TileSet::getDrawnSize();
y = (i / numTilesX_) * TileSet::getDrawnSize();
tiles_.at(i).setPosition(sf::Vector2f(x, y));
}
return;
}
const std::vector<Tile>& TileMap::getTiles() const {
return tiles_;
}
int TileMap::getNumTilesX() const{
return numTilesX_;
}
int TileMap::getNumTilesY() const{
return tiles_.size() / numTilesX_;
}
<file_sep>/src/Updateable.cpp
#include "Updateable.h"
#include <SFML/Graphics.hpp>
sf::Clock Updateable::clock_;
double Updateable::delta_;
double Updateable::totalTime_;
void Updateable::updateDelta() {
double tempTime(clock_.getElapsedTime().asSeconds());
delta_ = tempTime-totalTime_;
totalTime_=tempTime;
}
double Updateable::getDelta() {
return delta_;
}
<file_sep>/src/MenuScreen.h
#ifndef MENUSCREEN_H
#define MENUSCREEN_H
#include "Screen.h"
class MenuScreen: public Screen {
public:
static const int LOAD_MAP = 1;
static const int FONT_LOAD_FAILED = 2;
static const int LOGIC_ERROR = 3;
virtual int run(sf::RenderWindow &window);
const char* getMapPath();
private:
const char* mapPath_;
};
#endif
<file_sep>/src/Updateable.h
#ifndef UPDATEABLE_H
#define UPDATEABLE_H
#include <SFML/Graphics.hpp>
class Updateable {
public:
static void updateDelta();
static double getDelta();
virtual void update() = 0;
private:
static sf::Clock clock_;
static double delta_;
static double totalTime_;
};
#endif
<file_sep>/src/Collideable.cpp
#include "Collideable.h"
sf::FloatRect Collideable::getCollisionRect() {
return collisionRect_;
}
void Collideable::setCollisionRect(sf::FloatRect collisionRect) {
collisionRect_ = collisionRect;
return;
}
<file_sep>/src/TileMap.h
#ifndef TILEMAP_H
#define TILEMAP_H
#include <vector>
#include <SFML/Graphics.hpp>
#include "Tile.h"
class TileMap: public Drawable {
public:
TileMap() :
tiles_(std::vector<Tile>()),
numTilesX_(1) {};
TileMap(std::vector<Tile> tiles, int numTilesX) :
tiles_(tiles),
numTilesX_(numTilesX) {};
const std::vector<Tile>& getTiles() const;
int getNumTilesX() const;
int getNumTilesY() const;
void setTiles(std::vector<Tile>&);
void layoutTiles(int);
void setNumTilesX(int);
void draw(sf::RenderWindow&);
private:
std::vector<Tile> tiles_;
int numTilesX_;
};
#endif
<file_sep>/src/Drawable.cpp
#include "Drawable.h"
void Drawable::setPosition(sf::Vector2f position) {
position_ = position;
return;
}
sf::Vector2f Drawable::getPosition() {
return position_;
}
<file_sep>/src/ResourceManager.cpp
#include "ResourceManager.h"
#include <string>
std::string ResourceManager::binPath_;
void ResourceManager::setBinPath(std::string path) {
binPath_ = path;
return;
}
std::string ResourceManager::getResourcePath() {
return goUpNDirs(binPath_, 2) + "/res";
}
std::string ResourceManager::goUpNDirs(std::string path, int n) {
if (path.at(path.length() - 1) == '/') {
path = path.substr(0, path.size() - 1);
}
int i;
int slashCount = 0;
for (i = path.size() - 1; i >= 0; i--) {
if (path.at(i) == '/' || path.at(i) == '\\') {
slashCount++;
}
if (slashCount == n) {
break;
}
}
return path.substr(0, i);
}
<file_sep>/src/TileDrawable.h
#ifndef TILEDRAWABLE_H
#define TILEDRAWABLE_H
#include "TileSet.h"
#include "Drawable.h"
#include <SFML/Graphics.hpp>
class TileDrawable : public Drawable {
public:
virtual void draw(sf::RenderWindow&) = 0;
void setTileSet(TileSet*);
protected:
TileSet* tileSet_;
};
#endif
<file_sep>/makefile
SRC_DIR = src
BUILD_DIR = build
BIN_DIR = bin
BIN_NAME = tower_def
CC = g++
CFLAGS = -Wall -c -g
LFLAGS = -Wall -lsfml-graphics -lsfml-window -lsfml-system -ltinyxml2
OBJS = $(addprefix $(BUILD_DIR)/,main.o Tile.o MenuScreen.o GameScreen.o ResourceManager.o Logger.o TileDrawable.o Collideable.o Drawable.o Creep.o Updateable.o CreepPathfinder.o TileSet.o TileMap.o Wave.o)
all: binary
binary: make_build_dir $(OBJS)
mkdir -p $(BIN_DIR)
$(CC) $(BUILD_DIR)/*.o -o $(BIN_DIR)/$(BIN_NAME) $(LFLAGS)
$(BUILD_DIR)/%.o: $(SRC_DIR)/%.cpp
$(CC) $(CFLAGS) -o $@ $<
make_build_dir:
mkdir -p $(BUILD_DIR)
clean:
rm -rf $(BUILD_DIR)
rm -rf $(BIN_DIR)
| abff864f3d1e9828d2c9d75e1bb369037f046946 | [
"Makefile",
"C++"
] | 32 | C++ | GunshipPenguin/term3-ids | b5dc60f61f8d1c795961ffebbc118c5e77a1c1ad | 1839bccaf7b4227735dd7d38b424b09a159f9f36 |
refs/heads/master | <file_sep>const {version, name} = require('../package.json')
const started = new Date()
module.exports = {
serviceInfo() {
return {
service: name,
version,
more: 'https://refractor.stellar.expert/',
started: started.toISOString()
}
}
}<file_sep>const MongoClient = require('mongodb').MongoClient,
{name: appname} = require('../package.json'),
config = require('../app.config'),
DataProvider = require('./data-provider'),
TxSignature = require('../models/tx-signature')
class MongodbDataProvider extends DataProvider {
    /**
     * Open the MongoDB connection pool and bind the default database.
     * Must be awaited once before any other method is used.
     */
    async init() {
        const options = {
            appname,
            promoteValues: true,
            promoteLongs: false, //keep 64-bit values as Long instances to avoid precision loss
            keepAlive: true,
            useNewUrlParser: true,
            useUnifiedTopology: true,
            authSource: 'admin',
            poolSize: 30
        }
        const connection = await MongoClient.connect(config.db, options)
        this.db = connection.db()
        console.log(`Connected to MongoDB data source ${this.db.databaseName}`)
    }
    /**
     * Default database handle, assigned by init().
     * @type {Db}
     */
    db = null
    /**
     * Handle of the transactions collection.
     * @type {Collection}
     */
    get txCollection() {
        return this.db.collection('tx')
    }
    /**
     * Store transaction (upsert keyed by tx hash).
     * @param {TxModel} txModel
     * @returns {Promise}
     */
    async saveTransaction(txModel) {
        //the tx hash serves as the document primary key
        const {hash, ...otherProps} = txModel
        await this.txCollection
            .updateOne({_id: hash}, {$set: otherProps}, {upsert: true})
    }
    /**
     * Fetch a stored transaction by its hash.
     * @param {String} hash
     * @return {Promise<TxModel>} Resolves to null when not found.
     */
    async findTransaction(hash) {
        const doc = await this.txCollection
            .findOne({_id: hash})
        if (!doc) return null
        //map _id back to the "hash" field expected by callers
        doc.hash = doc._id
        delete doc._id
        if (doc.signatures) {
            //restore TxSignature instances (signature is stored as BSON Binary - unwrap its buffer)
            doc.signatures = doc.signatures.map(s => {
                const ts = new TxSignature()
                ts.key = s.key
                ts.signature = s.signature.buffer
                return ts
            })
        }
        return doc
    }
    /**
     * Update a stored transaction, optionally guarded by the expected current
     * status (optimistic concurrency check).
     * @param {String} hash - Transaction hash.
     * @param {Object} update - Fields to $set.
     * @param {*} [expectedCurrentStatus] - When provided, the update is applied only if the stored status matches.
     * @return {Promise<Boolean>} True when a document matched the filter.
     */
    async updateTransaction(hash, update, expectedCurrentStatus) {
        const filter = {_id: hash}
        if (expectedCurrentStatus !== undefined) {
            filter.status = expectedCurrentStatus
        }
        const {matchedCount} = await this.txCollection
            .updateOne(filter, {$set: update})
        return matchedCount > 0 //success if the operation matched
    }
    /**
     * Query stored transactions by a filter ("hash" is translated to "_id").
     * @param {Object} filter - Query condition.
     * @return {Cursor} MongoDB cursor over matching documents.
     */
    listTransactions(filter) {
        const condition = {...filter}
        if (condition.hash) {
            condition._id = condition.hash
            delete condition.hash
        }
        //NOTE(review): the "hash: '$_id'" entry relies on aggregation-expression support
        //in find() projections (MongoDB 4.4+) - confirm the deployed server version
        return this.txCollection.find(condition, {
            projection: {
                hash: '$_id',
                _id: 0,
                status: 1,
                network: 1,
                xdr: 1,
                callbackUrl: 1,
                maxTime: 1,
                minTime: 1,
                signatures: 1,
                submit: 1,
                submitted: 1,
                desiredSigners: 1
            }
        })
    }
}
module.exports = MongodbDataProvider<file_sep>const {normalizeNetworkName, resolveNetwork} = require('./network-resolver'),
storageLayer = require('../storage/storage-layer'),
{TransactionBuilder} = require('stellar-sdk')
/**
 * Load a stored transaction by hash and rehydrate it (network name + signatures).
 * @param {String} hash - Transaction hash to look up.
 * @return {Promise<TxModel>} Rejects with a 404-status error when not found.
 */
async function loadRehydrateTx(hash) {
    const stored = await storageLayer.dataProvider.findTransaction(hash)
    if (stored)
        return rehydrateTx(stored)
    const notFoundError = new Error(`Transaction ${hash} not found.`)
    notFoundError.status = 404
    throw notFoundError
}
/**
 * Rebuild the full transaction representation from a stored model:
 * resolves the human-readable network name and re-applies all collected
 * signatures to the transaction XDR.
 * @param {TxModel} txInfo - Stored transaction model.
 * @return {TxModel} Copy of the model with normalized network and signed XDR.
 */
function rehydrateTx(txInfo) {
    const {network, xdr, ...rehydrated} = txInfo
    const transaction = TransactionBuilder.fromXDR(xdr, resolveNetwork(network).passphrase)
    rehydrated.network = normalizeNetworkName(network)
    //re-apply every stored signature to the parsed transaction
    txInfo.signatures.forEach(({key, signature}) => transaction.addSignature(key, signature.toString('base64')))
    rehydrated.xdr = transaction.toXDR()
    return rehydrated
}
module.exports = {loadRehydrateTx, rehydrateTx}<file_sep>/**
* Executes simple predicate match against a given object
* @param {Object} value
* @param {Object} predicate
* @return {Boolean}
*/
function matchPredicate(value, predicate) {
if (!value) return false
for (let [key, condition] of Object.entries(predicate)) {
const fieldValue = value[key]
//equality match
if (typeof condition !== 'object') {
if (fieldValue !== condition) return false
} else {
//more complex condition in play
for (let [operator, predicateValue] of Object.entries(condition)) {
const func = operatorMatcher[operator]
if (!func) throw new EvalError(`Unknown predicate operator: ${func}`)
//evaluate condition predicate
if (!func(predicateValue, fieldValue)) return false
}
}
}
return true
}
const operatorMatcher = {
$gt(predicateValue, fieldValue) {
return fieldValue > predicateValue
},
$gte(predicateValue, fieldValue) {
return fieldValue >= predicateValue
},
$lt(predicateValue, fieldValue) {
return fieldValue < predicateValue
},
$lte(predicateValue, fieldValue) {
return fieldValue <= predicateValue
},
$in(predicateValue, fieldValue) {
return predicateValue.includes(fieldValue)
},
$ne(predicateValue, fieldValue) {
return fieldValue !== predicateValue
}
}
module.exports = {matchPredicate}<file_sep>const {StrKey} = require('stellar-sdk'),
{standardError} = require('./std-error'),
{resolveNetwork, resolveNetworkId} = require('./network-resolver'),
TxModel = require('../models/tx-model'),
{getUnixTimestamp} = require('./timestamp-utils')
/**
 * Validate submission parameters and build a storage model for a transaction.
 * @param {Transaction} tx - Parsed Stellar transaction.
 * @param {'pubnet'|'testnet'} network - Target Stellar network.
 * @param {String} callbackUrl - Optional URL the tx will be POSTed to once fully signed.
 * @param {Boolean} submit - Whether to submit the tx to the network once ready.
 * @param {Array<String>} desiredSigners - Optional list of expected signer public keys.
 * @param {Number} expires - Optional expiration date as a UNIX timestamp.
 * @returns {TxModel}
 * @throws Standard 400 errors on any invalid parameter.
 */
function parseTxParams(tx, {network, callbackUrl, submit, desiredSigners, expires = 0}) {
    const now = getUnixTimestamp()
    const txInfo = new TxModel()
    txInfo.network = resolveNetworkId(network)
    txInfo.xdr = tx.toXDR()
    txInfo.signatures = []
    if (callbackUrl) {
        //basic http(s) URL shape check
        if (!/^http(s)?:\/\/[-a-zA-Z0-9_+.]{2,256}\.[a-z]{2,4}\b(\/[-a-zA-Z0-9@:%_+.~#?&/=]*)?$/m.test(callbackUrl))
            throw standardError(400, 'Invalid URL supplied in "callbackUrl" parameter.')
        txInfo.callbackUrl = callbackUrl
    }
    if (desiredSigners && desiredSigners.length) {
        if (!(desiredSigners instanceof Array))
            throw standardError(400, 'Invalid "requestedSigners" parameter. Expected an array of Stellar public keys.')
        for (const key of desiredSigners)
            if (!StrKey.isValidEd25519PublicKey(key))
                throw standardError(400, `Invalid "requestedSigners" parameter. Key ${key} is not a valid Stellar public key.`)
        txInfo.desiredSigners = desiredSigners
    }
    //lower bound comes straight from the tx timebounds (0 = no lower bound)
    txInfo.minTime = (tx.timeBounds && parseInt(tx.timeBounds.minTime)) || 0
    if (expires) {
        //2147483647 = max int32, the upper bound for supported UNIX timestamps
        if (expires > 2147483647 || expires < 0)
            throw standardError(400, `Invalid "expires" parameter. ${expires} is not a valid UNIX date.`)
        if (expires < now)
            throw standardError(400, `Invalid "expires" parameter. ${expires} date has already passed.`)
        // if (expires > now + maxAgeDays * 24 * 60 * 60)
        // throw standardError(400, `Invalid "expires" parameter. Transactions can be stored for no more than ${maxAgeDays} days.`)
    }
    //retrieve expiration time from the transaction itself
    const txExpiration = (tx.timeBounds && parseInt(tx.timeBounds.maxTime)) || 0
    if (txExpiration && txExpiration < now)
        throw standardError(400, `Invalid transactions "timebounds.maxTime" value - the transaction already expired.`)
    //the tx own maxTime wins when it is stricter than the requested expiration
    //NOTE(review): when "expires" is 0 but the tx has a maxTime, maxTime is not
    //stored on the model - confirm this is intentional
    if (txExpiration > 0 && txExpiration < expires) {
        expires = txExpiration
    }
    if (expires > 0) {
        txInfo.maxTime = expires
    }
    if (submit === true) {
        txInfo.submit = true
    }
    return txInfo
}
/**
 * Detach signatures from a transaction: returns a shallow copy of the current
 * signatures array and resets the transaction's own signature list.
 * @param {Transaction} tx - Transaction to strip (mutated in place).
 * @return {{tx: Transaction, signatures: Array}} Same tx plus the detached signatures.
 */
function sliceTx(tx) {
    const detached = [...tx.signatures]
    tx._signatures = []
    return {tx, signatures: detached}
}
module.exports = {parseTxParams, sliceTx}<file_sep>const {matchPredicate} = require('../storage/simple-predicate-matcher')
//shared fixture matched against every predicate case below
const value = {a: 10, b: 'check', c: false}
//each case: [predicate descriptor, expected matching result]
test.each([
    [{a: 10}, true],
    [{a: 10, b: 'check'}, true],
    [{a: 8, b: 'check'}, false],
    [{a: {$lt: 20}}, true],
    [{a: {$lt: 10}}, false],
    [{a: {$lt: 20, $gte: 10}}, true],
    [{a: {$lt: 10, $gt: 5}}, false],
    [{a: {$ne: 5}}, true],
    [{a: {$ne: 10}}, false],
    [{b: {$in: ['a', 'check']}}, true],
    [{b: {$in: ['a', 'b']}}, false]
])('simple predicate match - %o %p', (predicate, matches) => {
    expect(matchPredicate(value, predicate)).toEqual(matches)
})
})<file_sep>const {Pool} = require('pg'),
{parse: parseConnectionString} = require('pg-connection-string'),
{StrKey} = require('stellar-sdk'),
{networks} = require('../app.config.json')
//per-network Postgres connection pool registry, populated by initPgDbPools()
const pools = {}

/**
 * Create a Postgres connection pool for every network listed in the app config
 * and cache it in the module-level registry.
 */
function initPgDbPools() {
    for (const [network, {coredb}] of Object.entries(networks)) {
        pools[network] = new Pool(coredb)
    }
}
/**
 * Build a Horizon-style signer descriptor.
 * @param {String} key - Signer public key (StrKey-encoded ed25519).
 * @param {Number} weight - Signer weight.
 * @return {{type: String, key: String, weight: Number}}
 */
function formatSigner(key, weight) {
    return {
        key,
        weight,
        type: 'ed25519_public_key'
    }
}
/**
 * Load account signer/threshold info directly from a stellar-core Postgres DB,
 * shaping the result like Horizon account records (for the consumed fields).
 * @param {String} network - Network key (a pool must exist for it, see initPgDbPools).
 * @param {Array<String>} accounts - Account addresses to fetch.
 * @return {Promise<Array<Object>>} Horizon-like account records.
 */
async function loadAccountsInfo(network, accounts) {
    const pool = pools[network]
    const {rows} = await pool.query('select accountid, thresholds, flags, signers from accounts where accountid = ANY($1)', [accounts])
    return rows.map(function ({accountid, thresholds, flags, signers}) {
        const accountSigners = []
        //thresholds column: 4 raw bytes [masterWeight, low, med, high], base64-encoded
        const rawThresholds = Buffer.from(thresholds, 'base64')
        const masterWeight = rawThresholds[0]
        if (masterWeight > 0) {
            //master key counts as a signer only when its weight is non-zero
            accountSigners.push(formatSigner(accountid, masterWeight))
        }
        if (signers) {
            //signers column layout as parsed here: uint32 count, then per entry
            //uint32 type + 32-byte key + uint32 weight (all big-endian)
            //NOTE(review): assumes a fixed 32-byte key body for every signer type -
            //confirm this holds for non-ed25519 (hash-x/pre-auth-tx) entries
            const raw = Buffer.from(signers, 'base64')
            const signersCount = raw.readUInt32BE(0)
            let ptr = 4
            for (let i = 0; i < signersCount; i++) {
                const type = raw.readUInt32BE(ptr)
                ptr += 4
                const key = StrKey.encodeEd25519PublicKey(raw.slice(ptr, ptr + 32))
                ptr += 32
                const weight = raw.readUInt32BE(ptr)
                ptr += 4
                if (type === 0) { //only type 0 (ed25519) signers are included
                    accountSigners.push(formatSigner(key, weight))
                }
                //TODO: handle other signer types
            }
        }
        return {
            account_id: accountid,
            id: accountid,
            signers: accountSigners,
            thresholds: {
                low_threshold: rawThresholds[1],
                med_threshold: rawThresholds[2],
                high_threshold: rawThresholds[3]
            }
        }
        //const parsedFlags = xdr.AccountFlags.fromXDR(Buffer.from(flags, 'base64'))
    })
}
module.exports = {loadAccountsInfo, initPgDbPools}<file_sep>const {Firestore, FieldPath} = require('@google-cloud/firestore'),
{firestore} = require('../app.config.json'),
MongodbDataProvider = require('./mongodb-data-provider')
/**
 * Convert a TxModel instance into a plain serializable object -
 * shallow-copies the model itself and each signature entry.
 * @param {TxModel} txModel
 * @return {Object}
 */
function txModelToPlainObject(txModel) {
    const plain = Object.assign({}, txModel)
    if (plain.signatures) {
        plain.signatures = plain.signatures.map(signature => Object.assign({}, signature))
    }
    return plain
}
class MongodbFirestoreDataProvider extends MongodbDataProvider {
    /**
     * Initialize the MongoDB connection (via the base class) and the
     * secondary Firestore mirror.
     */
    async init() {
        await super.init()
        this.firestore = new Firestore({
            projectId: firestore.project_id,
            credentials: firestore, //service-account credentials come from app config
            ignoreUndefinedProperties: true
        })
        console.log(`Connected to Firestore data source ${firestore.project_id}`)
    }
    /**
     * Firestore client instance, assigned by init().
     * @type {Firestore}
     */
    firestore
    //document reference for a transaction in the Firestore "tx" collection
    getDoc(hash) {
        return this.firestore.doc('tx/' + hash)
    }
    /**
     * Store transaction in MongoDB, then mirror the full model to Firestore.
     * @param {TxModel} txModel
     * @returns {Promise}
     */
    async saveTransaction(txModel) {
        await super.saveTransaction(txModel)
        await this.getDoc(txModel.hash).set(txModelToPlainObject(txModel))
    }
    /**
     * Update transaction in MongoDB and, only when the conditional update
     * succeeded, propagate the same change to the Firestore mirror.
     * @return {Promise<Boolean>} True when the MongoDB update matched.
     */
    async updateTransaction(hash, update, expectedCurrentStatus) {
        const res = await super.updateTransaction(hash, update, expectedCurrentStatus)
        if (res) {
            await this.getDoc(hash).update(txModelToPlainObject(update))
        }
        return res
    }
}
module.exports = MongodbFirestoreDataProvider<file_sep>const axios = require('axios')
/**
 * Default callback executor - POSTs the signed transaction to the callback URL.
 * Replaceable via setCallbackHandler() (e.g. in tests).
 * @param {TxModel} txInfo
 * @return {Promise} Axios response promise.
 */
let callbackHandler = function (txInfo) {
    const {tx, network, hash, callbackUrl} = txInfo
    return axios.post(callbackUrl, {tx, hash, network})
}

/**
 * Execute the callback for a transaction, retrying with exponential backoff
 * until the remote server acknowledges with HTTP 200.
 * @param {TxModel} txInfo
 * @returns {Promise} Resolves once the callback has been acknowledged.
 */
async function processCallback(txInfo) {
    if (!txInfo.callbackUrl) throw new Error(`Attempt to execute an empty callback for tx ${txInfo.hash}`)
    for (let i = 4; i <= 12; i++) {
        //fix: axios responses expose the HTTP status as "status", not "statusCode" -
        //the original destructuring always yielded undefined and never detected success
        const {status} = await callbackHandler(txInfo)
        if (status === 200) return
        //repeat after a delay - exponential backoff waiting strategy
        await new Promise(resolve => setTimeout(resolve, 1 << i))
    }
    throw new Error(`Server returned invalid status code after processing the callback`) //no valid response from the server
}

/**
 * Replace the default callback executor.
 * @param {Function} handler - Receives a TxModel, returns a promise resolving to {status}.
 */
function setCallbackHandler(handler) {
    callbackHandler = handler
}
module.exports = {processCallback, setCallbackHandler}<file_sep>import React, {useState} from 'react'
import isEqual from 'react-fast-compare'
import nav from '../../infrastructure/nav'
import {networks} from '../../app.config.json'
import Dropdown from '../components/dropdown'
import {submitTx} from '../../infrastructure/tx-dispatcher'
function DesiredTxSignersView({signers, onChange}) {
const signersToRender = [...signers].filter(s => !!s)
signersToRender.push('')
function editSigner(i, e) {
const newSigners = [...signers],
{value} = e.target
newSigners[i] = value//.replace(/[^ABCDEFGHIJKLMNOPQRSTUVWXYZ234567]/g, '')
onChange(newSigners.filter(s => !!s))
}
return <div>
{signersToRender.map((signer, i) => <div key={i}>
<input type="text" value={signer} placeholder="Copy-paste signer key here"
onChange={e => editSigner(i, e)}/>
</div>)}
</div>
}
/**
 * Form for storing a new pending transaction: collects XDR, network,
 * expiration, callback URL and submit flag, then posts it to the server
 * and navigates to the stored tx page on success.
 */
export default function AddTxView() {
    const [data, setData] = useState({
            xdr: '',
            network: 'testnet',
            submit: false,
            callback: '',
            expires: '',
            desiredSigners: []
        }),
        [error, setError] = useState(''),
        [inProgress, setInProgress] = useState(false)

    //update a single form field, skipping the state update when nothing changed
    function setParam(param, value) {
        setData(prev => {
            const newData = {...prev, [param]: value}
            if (!isEqual(prev, newData)) return newData
            return prev
        })
    }

    //send the form to the server and navigate to the stored tx page on success
    function storeTx() {
        setInProgress(true)
        return submitTx(data)
            .then(res => {
                console.log(res)
                nav.navigate(`/tx/${res.hash}`)
            })
            .catch(e => {
                console.error(e)
                setError(e.message)
            })
            .finally(() => setInProgress(false))
    }

    //dropdown options built from the networks declared in app config
    const networkOptions = Object.keys(networks).map(key => ({value: key, title: networks[key].title}))
    return <div className="card">
        <h2>Submit new transaction</h2>
        <hr/>
        <div className="space">
            <div>
                <label>Stellar network{' '}
                    <Dropdown options={networkOptions} value={data.network} onChange={n => setParam('network', n)}/>
                </label>
            </div>
            <div className="space">
                <label>Transaction XDR
                    <div className="dimmed small">You can build it using {' '}
                        <a href="https://laboratory.stellar.org/#txbuilder" target="_blank">Stellar Laboratory</a>{' '}
                        or any <a href="https://developers.stellar.org/docs/software-and-sdks/#sdks" target="_blank">
                            Stellar SDK</a>
                    </div>
                    <textarea value={data.xdr} disabled={inProgress} style={{width: '100%', height: '5em'}}
                              placeholder="Base64-encoded transaction envelope"
                              onChange={e => setParam('xdr', e.target.value.trim())}/>
                </label>
            </div>
            <div>
                <label>
                    Valid until
                    <br/>
                    <input value={data.expires} onChange={e => setParam('expires', e.target.value)}
                           placeholder="UNIX timestamp or ISO date, like 2020-11-29T09:29:13Z"/>
                </label>
            </div>
            <div>
                <label>
                    Callback URL where this transaction will be POSTed once signed
                    <br/>
                    <input type="text" value={data.callback} onChange={e => setParam('callback', e.target.value.trim())}
                           placeholder="for example, https://my.service/success.php"/>
                </label>
            </div>
            <div>
                <label>
                    <input type="checkbox" checked={data.submit}
                           onChange={e => setParam('submit', e.target.checked)}/>{' '}
                    Submit this transaction to the network once ready
                </label>
            </div>
            {/*<div>
                <label>Desired signers</label>
                <DesiredTxSignersView signers={data.desiredSigners}
                                      onChange={newSigners => setParam('desiredSigners', newSigners)}/>
            </div>*/}
            {inProgress && <div className="loader"/>}
            {!!error && <div className="error">
                {error}
            </div>}
            <div className="space row">
                <div className="column column-25">
                    <button className="button button-block" disabled={inProgress} onClick={storeTx}>Save</button>
                </div>
                <div className="column column-75">
                    <div className="micro-space small dimmed">
                        Please note: this a developers preview, it is not recommended to use it production environments.
                    </div>
                </div>
            </div>
        </div>
    </div>
}<file_sep>import React from 'react'
import PropTypes from 'prop-types'
import cn from 'classnames'
import './block-select.scss'
/**
 * Select the entire contents of a DOM element (click-to-copy helper).
 * Inputs/textareas use native selection; other elements get a DOM Range selection.
 */
function select(target) {
    //handle textarea and inputs
    if (target.nodeName.match(/^(INPUT|TEXTAREA)$/i)) {
        target.focus()
        target.select()
        return
    }
    //handle selection inside text elements
    const range = document.createRange()
    range.selectNodeContents(target)
    const sel = window.getSelection()
    if (typeof sel.setBaseAndExtent === 'function') {
        // Safari - NOTE(review): presumably required for selection to work there; confirm
        sel.setBaseAndExtent(target, 0, target, 1)
    }
    sel.removeAllRanges()
    sel.addRange(range)
}
/**
 * Element wrapper that auto-selects its whole content when focused
 * (convenient copy-paste of hashes, keys, XDR blobs).
 * Renders as "span" by default; override with the "as" prop.
 */
function BlockSelect({children, as = 'span', title, className, style, wrap, ...otherProps}) {
    const props = {
        className: cn('block-select', className, {'word-wrap': wrap}),
        onFocus: e => select(e.target),
        tabIndex: '-1', //focusable by click/script but excluded from tab order
        style,
        title,
        ...otherProps //spread last - callers may override any of the defaults above
    }
    return React.createElement(as, props, children)
}
//runtime prop validation for BlockSelect
BlockSelect.propTypes = {
    children: PropTypes.any.isRequired,
    title: PropTypes.string,
    className: PropTypes.string,
    style: PropTypes.object,
    wrap: PropTypes.bool,
    as: PropTypes.string
}
export default BlockSelect<file_sep>import React from 'react'
import BlockSelect from '../components/block-select'
function SignerKey({address, selected}) {
return <div className="small">
{selected ? '✓' : '-'} <BlockSelect>{address}</BlockSelect>
</div>
}
/**
 * Displays the signatures already applied to a transaction plus the other
 * potential signers discovered from the signature schema.
 * @param {Array} signatures - Signatures applied so far.
 * @param {Object} schema - Signature schema exposing discoverSigners().
 * @param {Boolean} readyToSubmit - Whether the tx has gathered enough signatures.
 */
export default function TxSignaturesView({signatures, schema, readyToSubmit}) {
    //NOTE(review): "signatures?.length" below implies signatures may be undefined,
    //but .map() here would throw first - confirm callers always pass an array
    const appliedSigners = signatures.map(sig => sig.key),
        potentialSigners = schema.discoverSigners(),
        otherAvailableSigners = potentialSigners.filter(signer => !appliedSigners.includes(signer))
    return <>
        <h3>Signatures</h3>
        {signatures.map(({key}) => <SignerKey key={key} address={key} selected/>)}
        {!signatures?.length && <div className="dimmed small">
            (no signatures so far)
        </div>}
        {!readyToSubmit && otherAvailableSigners.length > 0 &&
        <div className="micro-space">
            <h4 className="dimmed">{signatures.length ? 'Other available' : 'Available'} signers:</h4>
            {otherAvailableSigners.map(signer => <SignerKey key={signer} address={signer}/>)}
        </div>
        }
    </>
}<file_sep>export function stringifyQuery(query) {
    //empty/absent query serializes to an empty string (no leading "?")
    if (!query) return ''
    const q = [],
        entries = Object.entries(query)
    //sort keys alphabetically for a deterministic query string
    entries.sort(function ([a], [b]) {
        if (a > b) return 1
        if (a < b) return -1
        return 0
    })
    for (let [key, value] of entries) {
        //undefined/null/empty values are omitted entirely
        if (value !== undefined && value !== null && value !== '') {
            if (value instanceof Array) {
                //arrays are encoded as repeated "key[]=value" pairs
                for (let v of value) {
                    q.push(`${encodeURIComponent(key)}[]=${encodeURIComponent(v)}`)
                }
            } else {
                q.push(`${encodeURIComponent(key)}=${encodeURIComponent(value)}`)
            }
        }
    }
    return q.length ? '?' + q.join('&') : ''
}
/**
 * Parse a query string into a plain object ("key[]" keys become arrays).
 * @param {String} [query] - Query string; defaults to window.location.search.
 * @param {Object} [dest] - Optional object to merge the parsed params into.
 * @return {Object}
 */
export function parseQuery(query = null, dest = null) {
    //default to the current browser location query string
    if (query === null) {
        query = window.location.search
    }
    //strip the leading "?" if present
    if (query[0] === '?') query = query.substr(1)
    if (!dest) {
        dest = {}
    }
    for (let kv of query.split('&')) {
        let [key, value] = kv.split('=').map(v => decodeURIComponent(v))
        if (key) {
            //"key[]" entries are accumulated into an array under "key"
            if (/\[\]$/.test(key)) {
                key = key.substr(0, key.length - 2)
                const array = dest[key] || []
                array.push(value)
                value = array
            }
            dest[key] = value
        }
    }
    return dest
}<file_sep>const FsDataProvider = require('./fs-data-provider'),
MongodbDataProvider = require('./mongodb-data-provider'),
MongodbFirestoreDataProvider = require('./mongodb-firestore-data-provider'),
InMemoryDataProvider = require('./inmemory-data-provider'),
{storage} = require('../app.config.json')
class StorageLayer {
    /**
     * Lazily create and initialize the configured data provider.
     * Subsequent calls return the already initialized singleton instance.
     * @param {String} [providerName] - Storage engine name (defaults to app config value).
     * @return {Promise<DataProvider>}
     */
    async initDataProvider(providerName = storage) {
        if (!this.dataProvider) {
            //map engine names to provider constructors
            const providerConstructors = {
                'fs': FsDataProvider,
                'mongodb': MongodbDataProvider,
                'mongodb+firestore': MongodbFirestoreDataProvider,
                'inmemory': InMemoryDataProvider
            }
            const ProviderConstructor = providerConstructors[providerName]
            if (!ProviderConstructor)
                throw new Error(`Unsupported data provider storage engine: ${providerName}`)
            const provider = new ProviderConstructor()
            await provider.init()
            this.dataProvider = provider
        }
        return this.dataProvider
    }

    /**
     * @type {DataProvider}
     */
    dataProvider
}
const storageLayer = new StorageLayer()
module.exports = storageLayer<file_sep>import {createBrowserHistory} from 'history'
import isEqual from 'react-fast-compare'
import {parseQuery, stringifyQuery} from './url-utils'
const history = window.__history = createBrowserHistory()
/**
 * Merge (or replace) query params on `this.query`.
 * Must be invoked with a Nav instance bound as `this`.
 * @param {Object} paramsToSet - Params to apply.
 * @param {Boolean} [replace] - Discard existing params instead of merging.
 * @return {Boolean} True if the query effectively changed.
 */
function setQueryInternal(paramsToSet, replace = false) {
    const base = replace ? null : this.query
    const updatedQuery = Object.assign({}, base, paramsToSet)
    //compare serialized forms to detect an effective change
    if (JSON.stringify(updatedQuery) === JSON.stringify(this.query)) return false
    //freeze to prevent accidental external mutation
    Object.freeze(updatedQuery)
    this.query = updatedQuery
    return true
}
/**
 * Navigation facade wrapping the browser history and keeping a parsed,
 * frozen copy of the current query string in `this.query`.
 */
class Nav {
    constructor() {
        this.query = {}
        this.updateQuery(parseQuery())
    }

    //current parsed query params (frozen object, replaced on every change)
    query = {}

    get history() {
        return history
    }

    get path() {
        return history.location.pathname
    }

    get hash() {
        return history.location.hash
    }

    set hash(value) {
        if (value[0] !== '#') value = '#' + value
        window.location.hash = value
        history.location.hash = value
    }

    /**
     * Update query string params and sync the browser URL (replace, no new history entry).
     * @param {Object} paramsToSet - Params to merge into the current query.
     * @param {Boolean} [replace] - Replace the whole query instead of merging.
     */
    updateQuery(paramsToSet, replace = false) {
        //bug fix: the "replace" flag was accepted but never forwarded before,
        //so updateQuery(params, true) behaved identically to a merge
        if (setQueryInternal.call(this, paramsToSet, replace)) {
            const {pathname} = this.history.location,
                newUrl = pathname + stringifyQuery(this.query)
            this.history.replace(newUrl)
        }
    }

    /**
     * Push a new in-app location (no-op if the URL matches the current one).
     * @param {String} url - Relative URL to navigate to.
     */
    navigate(url) {
        if (history.location.pathname + history.location.search === url.replace(/#.*$/, '')) return
        setQueryInternal.call(this, parseQuery(url.split('?')[1] || ''), true)
        this.history.push(url)
    }

    /**
     * Replace the current in-app location without adding a history entry.
     * @param {String} url - Relative URL to set.
     */
    replaceState(url) {
        if (history.location.pathname + history.location.search === url.replace(/#.*$/, '')) return
        setQueryInternal.call(this, parseQuery(url.split('?')[1] || ''), true)
        this.history.replace(url)
    }

    /**
     * Whether the route path or search string changed between two prop sets.
     */
    urlChanged(currentProps, nextProps) {
        return this.pathChanged(currentProps, nextProps)
            || nextProps.location.search !== currentProps.location.search
    }

    /**
     * Whether the matched route changed between two prop sets.
     */
    pathChanged(currentProps, nextProps) {
        return !isEqual(nextProps.match, currentProps.match)
    }

    /**
     * Whether any of the given query params changed between two prop sets.
     * @param {...String} params - Query param names to compare.
     */
    queryParamChanged(currentProps, nextProps, ...params) {
        const currentQuery = parseQuery(currentProps.location.search),
            nextQuery = parseQuery(nextProps.location.search)
        for (const param of params) {
            if (currentQuery[param] !== nextQuery[param]) return true
        }
        return false
    }
}
const nav = new Nav()
export default nav
/**
 * Intercept anchor clicks inside the container and route internal links
 * through the SPA history instead of triggering full page reloads.
 * @param {HTMLElement} container - Root element to attach the click handler to.
 */
export function bindClickNavHandler(container) {
    container.addEventListener('click', e => {
        //do not process ctrl+click on links
        if (e.ctrlKey) return
        //walk up from the click target to the nearest enclosing anchor
        let link = e.target
        while (link && !(link.tagName === 'A' && link.href)) {
            link = link.parentElement
        }
        if (link) {
            const href = link.getAttribute('href')
            if (!href) return
            if (href === '#') return e.preventDefault()
            if (link.target === '_blank') return
            //inside an iframe - escalate navigation to the top-level window
            if (window.parent !== window) {
                window.top.location = /^(https?):\/\//.test(href) ? href : (window.origin + href)
                return e.preventDefault()
            }
            //absolute/external and explicitly marked links use default browser behavior
            if (link.classList.contains('external-link')) return
            if (/^(mailto:|tel:|(https?):\/\/)/.test(href)) return
            const currentLocation = history.location
            const [pathAndQuery, hash] = href.split('#')
            if (!pathAndQuery || (currentLocation.pathname + currentLocation.search) === pathAndQuery) {
                //allow jumping to the element by id
                if (hash) return
                return e.preventDefault()
            }
            if (link.classList.contains('static-link')) return e.preventDefault()
            //internal link - route through SPA navigation
            nav.navigate(href)
            e.preventDefault()
            window.scrollTo(0, 0)
        }
    })
}<file_sep>import {TransactionBuilder, StrKey, Server} from 'stellar-sdk'
import {inspectTransactionSigners} from '@stellar-expert/tx-signers-inspector'
import {networks} from '../app.config.json'
import {apiCall} from './api'
/**
 * Validate raw tx submission data and normalize it for the API call.
 * @param {Object} data - Raw form/request data.
 * @param {String} data.network - Network name.
 * @param {String} data.xdr - Base64-encoded transaction envelope.
 * @param {Boolean} [data.submit] - Auto-submit once fully signed.
 * @param {String} [data.callback] - Callback URL invoked once signed.
 * @param {String|Number} [data.expires] - Expiration date or UNIX timestamp.
 * @param {String[]} [data.desiredSigners] - Signers requested by the tx author.
 * @return {Promise<Object>} Normalized request payload.
 * @throws {Error} When any field fails validation.
 */
export async function validateNewTx(data) {
    const res = {
        network: data.network,
        xdr: data.xdr
    }
    //resolve network name
    const networkParams = networks[data.network]
    if (!networkParams)
        throw new Error('Invalid network')
    //assume that a transaction is valid if we can parse it
    try {
        TransactionBuilder.fromXDR(data.xdr, networkParams.passphrase)
        //TODO: add more complex checks in the future
    } catch (e) {
        throw new Error('Invalid transaction xdr')
    }
    //check if auto-submit is set
    if (data.submit === true) {
        res.submit = true
    }
    //validate callback url
    if (data.callback) {
        if (!/^http(s)?:\/\/[-a-zA-Z0-9_+.]{2,256}\.[a-z]{2,4}\b(\/[-a-zA-Z0-9@:%_+.~#?&/=]*)?$/.test(data.callback))
            throw new Error('Invalid callback URL')
        res.callbackUrl = data.callback
    }
    //check and parse expiration date
    if (data.expires) {
        let expires
        if (data.expires.toString().match(/^\d+$/)) {
            expires = parseInt(data.expires)
            //check that input is a valid Unix timestamp
            if (expires < 0 || expires > 2147483648)
                throw new Error('Invalid expiration date - UNIX timestamp expected')
            //fix: store the parsed numeric timestamp - previously the raw input
            //(possibly a string) was stored, inconsistent with the branch below
            res.expires = expires
        } else {
            const ts = Date.parse(data.expires)
            if (isNaN(ts))
                throw new Error('Invalid expiration date - unknown data format')
            //convert milliseconds to whole seconds
            res.expires = ts / 1000 >> 0
        }
        if (res.expires < new Date().getTime() / 1000)
            throw new Error('Invalid expiration date - only dates in the future allowed')
    }
    //validate proposed signers
    if (data.desiredSigners?.length) {
        const nonEmptySigners = data.desiredSigners.filter(s => !!s)
        for (let signer of nonEmptySigners) {
            if (!StrKey.isValidEd25519PublicKey(signer))
                throw new Error('Invalid signer public key - ' + signer)
        }
        res.desiredSigners = nonEmptySigners
    }
    //everything is ok - return processed data
    return res
}
/**
 * Validate tx data, store it on the Refractor server, and prepare the response.
 * @param {Object} data - Raw tx data (see validateNewTx for accepted fields).
 * @return {Promise<Object>} Prepared tx info with signing schema attached.
 */
export async function submitTx(data) {
    //validate and prepare the data
    const parsedData = await validateNewTx(data)
    //submit to the server
    const txInfo = await apiCall('tx', parsedData, {method: 'POST'})
    return await prepareTxInfo(txInfo)
}
/**
 * Load stored transaction info by hash and refresh its on-chain status from Horizon.
 * @param {String} txhash - 64-char hex-encoded transaction hash.
 * @return {Promise<Object>} Prepared tx info.
 * @throws {Error} When the hash format is invalid.
 */
export async function loadTx(txhash) {
    if (typeof txhash !== 'string' || !/^[a-f0-9]{64}$/i.test(txhash))
        throw new Error(`Invalid transaction hash: ${txhash || '(empty)'}`)
    //load from the server
    const txInfo = await apiCall('tx/' + txhash)
    if (txInfo.status === 'ready') {
        try {
            const {created_at, successful} = await new Server(networks[txInfo.network].horizon)
                .transactions().transaction(txInfo.hash).call()
            if (successful) {
                txInfo.submitted = new Date(created_at)
                txInfo.status = 'processed'
            } else {
                txInfo.status = 'failed'
            }
        } catch (e) {
            //best-effort status refresh - tx may not be on the ledger yet; keep "ready" status
        }
    }
    return await prepareTxInfo(txInfo)
}
/**
 * Parse the tx XDR, discover eligible signers, and compute submission readiness.
 * Mutates and returns the passed txInfo object.
 * @param {Object} txInfo - Raw tx info received from the server API.
 * @return {Promise<Object>} Same object with `schema` and `readyToSubmit` attached.
 */
async function prepareTxInfo(txInfo) {
    //create Transaction object
    const {passphrase, horizon} = networks[txInfo.network],
        tx = TransactionBuilder.fromXDR(txInfo.xdr, passphrase)
    //discover signers and check whether it is fully signed
    const schema = await inspectTransactionSigners(tx, {horizon})
    txInfo.schema = schema
    txInfo.readyToSubmit = schema.checkFeasibility(txInfo.signatures.map(sig => sig.key))
    return txInfo
    //TODO: fetch info from Horizon for a submitted transaction
}
<file_sep>import React from 'react'
import PropTypes from 'prop-types'
import Catcher from './general/catcher'
export default function Layout({children}) {
return <div className="page-wrapper">
<div className="blue-ribbon"/>
<div className="top-block">
<div className="container">
<a href="/" className="logo"><img src="/img/refractor-small-logo.png" alt="refractor logo"/></a>
<div className="main-menu">
<a href="/tx/add">Store transaction</a>
<a href="/openapi.html" target="_blank">Documentation & API</a>
</div>
</div>
</div>
<div className="page-container">
<div className="container">
<Catcher>{children}</Catcher>
</div>
</div>
<div className="footer">
<div className="container text-center">
<div>{new Date().getFullYear()} © Refractor Web <span className="dimmed">v{appVersion}</span>
</div>
<div>
<a href="https://github.com/stellar-expert/refractor" target="_blank" rel="noopener"
className="nowrap">
Open Source
</a> 
<a href="mailto:<EMAIL>" target="_blank" rel="noopener" className="nowrap">
<EMAIL>
</a>
</div>
</div>
</div>
</div>
}
Layout.propTypes = {
children: PropTypes.node
}
<file_sep>import React, {useState} from 'react'
import albedo from '@albedo-link/intent'
import {submitTx} from '../../infrastructure/tx-dispatcher'
import BlockSelect from '../components/block-select'
function TxStoreResult({result}) {
if (!result) return null
const {accepted, rejected} = result.changes
return <>
<h3>Changes</h3>
<div className="small">
{accepted?.length > 0 && <div>
<div>
Accepted signatures:
</div>
<div>
{accepted.map(s => <div key={s.signature}>✓ <BlockSelect>{s.key}</BlockSelect></div>)}
</div>
</div>}
{rejected?.length > 0 && <div>
<div>
Rejected signatures:
</div>
<div>
{rejected.map(s => <div key={s.signature}>✗ <BlockSelect>{s.key}</BlockSelect></div>)}
</div>
</div>}
</div>
</>
}
export default function TxAddSignatureView({txInfo, onUpdate}) {
const [error, setError] = useState(''),
[inProgress, setInProgress] = useState(false),
[result, setResult] = useState(null)
if (txInfo.readyToSubmit || txInfo.submitted) return null
function requestSignature() {
setError('')
setInProgress(true)
setResult(null)
albedo.tx({xdr: txInfo.xdr, network: txInfo.network})
.then(({signed_envelope_xdr: xdr}) => submitTx({...txInfo, xdr}))
.then(txInfo => {
setResult(txInfo)
onUpdate(txInfo)
})
.catch(e => {
setError(e.message)
})
.finally(() => setInProgress(false))
}
return <div className="space">
<div className="row">
<div className="column column-25">
<button className="button button-block" disabled={inProgress} onClick={requestSignature}>Sign
transaction
</button>
</div>
</div>
{!!error && <div className="error">{error}</div>}
{inProgress && <div className="loader"/>}
<TxStoreResult result={result}/>
</div>
}<file_sep>(async function () {
process.env.TZ = 'Etc/UTC'
console.log('Starting up Refractor API')
const http = require('http'),
express = require('express'),
bodyParser = require('body-parser'),
{port, trustProxy} = require('./app.config'),
finalizer = require('./business-logic/finalization/finalizer')
//setup connectors
require('./storage/core-db-data-source').initPgDbPools()
console.log('StellarCore DB connection - initialized')
await require('./storage/storage-layer').initDataProvider()
console.log('Storage data provider - initialized')
await finalizer.resetProcessingStatus()
console.log('Rollback pending actions - done')
//start background workers
finalizer.start()
//init http app
const app = express()
app.disable('x-powered-by')
if (trustProxy) {
app.set('trust proxy', trustProxy)
}
if (process.env.MODE === 'development') {
const logger = require('morgan')
app.use(logger('dev'))
}
app.use(bodyParser.json())
app.use(bodyParser.urlencoded({extended: false}))
// error handler
app.use((err, req, res, next) => {
if (err) console.error(err)
res.status(500).end()
})
/**
* Finalize running tasks and close connections
* @param exitCode
*/
async function gracefulExit(exitCode = 0) {
//exit in any case in 10 seconds
setTimeout(() => {
console.error('Failed to perform clean exit')
process.exit(-1)
}, 5000) //wait max 5 seconds
await new Promise(resolve => {
setTimeout(async () => {
finalizer.stop()
console.log('Clean exit')
process.exit(exitCode)
resolve()
}, 1000)
})
}
process.on('uncaughtException', async err => {
console.warn('Fatal error')
console.error(err)
await gracefulExit(1)
})
process.on('unhandledRejection', async (reason, promise) => {
console.warn('Fatal error - unhandled promise rejection')
console.error(`Unhandled Rejection at: ${promise} reason: ${reason.stack || reason}.`)
await gracefulExit(1)
})
process.on('message', msg => msg === 'shutdown' && gracefulExit()) // handle messages from pm2
process.on('SIGINT', gracefulExit)
process.on('SIGTERM', gracefulExit)
//register API routes
require('./api/api-routes')(app)
console.log('API routes - initialized')
const serverPort = parseInt(process.env.PORT || port || '3000')
app.set('port', serverPort)
const server = http.createServer(app)
server.on('listening', () => console.log(`Refractor API server started on ${server.address().port} port`))
server.listen(serverPort)
})()<file_sep>import React from 'react'
import PropTypes from 'prop-types'
import {Switch, Router, Route, Redirect} from 'react-router'
import Layout from './layout-view'
import TxView from './tx/tx-view'
import AddTxView from './tx/add-tx-view'
import NotFoundView from './general/not-found-view'
import WelcomeView from './general/welcome-view'
//import Home from './pages/home-page-view'
/**
 * Top-level application router mapping URL paths to page views.
 * @param {{history: Object}} props - Browser history instance to drive routing.
 */
function AppRouter({history}) {
    return <Router history={history}>
        <Layout>
            <Switch>
                <Route path="/tx/add" component={AddTxView}/>
                <Route path="/tx/:txhash" component={TxView}/>
                <Route path="/" exact component={WelcomeView}/>
                <Route component={NotFoundView}/>
            </Switch>
        </Layout>
    </Router>
}
AppRouter.propTypes = {
history: PropTypes.object.isRequired
}
export default AppRouter<file_sep>const {registerRoute} = require('./router'),
{loadRehydrateTx} = require('../business-logic/tx-loader'),
Signer = require('../business-logic/signer'),
{serviceInfo} = require('../business-logic/info-handler')
/**
 * Register public API routes on the Express app.
 * @param {Object} app - Express application instance.
 */
module.exports = function registerRoutes(app) {
    //root endpoint - service info
    registerRoute(app,
        '/',
        {rate: 'general'},
        () => serviceInfo())
    //fetch a stored transaction by hash
    //NOTE(review): path lacks a leading slash unlike the other routes ('tx/:hash' vs '/tx') -
    //verify registerRoute normalizes paths, otherwise this route may never match
    registerRoute(app,
        'tx/:hash',
        {rate: 'general'},
        ({params}) => loadRehydrateTx(params.hash))
    //store a new transaction / append signatures to an existing one
    registerRoute(app,
        '/tx',
        {rate: 'strict', method: 'post'},
        async ({body}) => {
            const signer = new Signer(body)
            await signer.init()
            signer.processNewSignatures()
            await signer.saveChanges()
            return signer.toJSON()
        })
}
<file_sep>class DataProvider {
async init() {
}
/**
* Store transaction.
* @param {TxModel} txModel
* @returns {Promise}
*/
async saveTransaction(txModel) {
throw new Error('Not implemented')
}
/**
*
* @param {String} hash
* @return {Promise<TxModel>}
*/
async findTransaction(hash) {
throw new Error('Not implemented')
}
/**
* Get transactions iterator filtered by
* @param {Object} filter
* @return {TxModelsCursor}
*/
listTransactions(filter) {
throw new Error('Not implemented')
}
/**
*
* @param {String} hash
* @param {Object} update
* @param {TxStatus} [expectedCurrentStatus]
* @return {Promise<Boolean>}
*/
async updateTransaction(hash, update, expectedCurrentStatus = undefined) {
throw new Error('Not implemented')
}
/**
*
* @param {String} hash
* @param {TxStatus} newStatus
* @param {TxStatus} [expectedCurrentStatus]
* @return {Promise<Boolean>}
*/
async updateTxStatus(hash, newStatus, expectedCurrentStatus = undefined) {
return this.updateTransaction(hash, {status: newStatus}, expectedCurrentStatus)
}
}
module.exports = DataProvider
/**
* @callback TxModelsCursor
* @async
* @generator
* @yields {TxModel}
*/
<file_sep>const DataProvider = require('./data-provider'),
{matchPredicate} = require('./simple-predicate-matcher')
/**
 * Volatile data provider keeping all transactions in a plain in-memory map.
 * Serves as the base class for file-backed storage.
 */
class InMemoryDataProvider extends DataProvider {
    //hash-to-TxModel map
    storage

    async init() {
        this.storage = {}
    }

    /**
     * Store a transaction model keyed by its hash.
     * @param {TxModel} txModel
     */
    async saveTransaction(txModel) {
        this.storage[txModel.hash] = txModel
        await this.save()
    }

    /**
     * Look up a transaction by hash.
     * @param {String} txId
     * @return {Promise<TxModel>}
     */
    async findTransaction(txId) {
        return this.storage[txId]
    }

    /**
     * Apply an update to a stored transaction.
     * @param {String} hash
     * @param {Object} update
     * @param {TxStatus} [expectedCurrentStatus] - Optimistic concurrency guard.
     * @return {Promise<Boolean>} False when the tx is missing or the status check fails.
     */
    async updateTransaction(hash, update, expectedCurrentStatus) {
        const existing = await this.findTransaction(hash)
        if (!existing) return false
        //optimistic concurrency check
        if (expectedCurrentStatus !== undefined && existing.status !== expectedCurrentStatus) return false
        Object.assign(existing, update)
        await this.save()
        return true
    }

    /**
     * Iterate transactions matching the filter.
     * @param {Object} filter
     * @return {TxModelsCursor}
     */
    listTransactions(filter) {
        const matched = Object.values(this.storage).filter(tx => matchPredicate(tx, filter))
        return {
            [Symbol.asyncIterator]() {
                let position = 0
                return {
                    next() {
                        if (position >= matched.length) return Promise.resolve({done: true})
                        return Promise.resolve({value: matched[position++], done: false})
                    }
                }
            },
            async toArray() {
                return matched.slice()
            }
        }
    }

    async save() {
        //no-op for purely in-memory storage; persistent subclasses override this
    }
}
module.exports = InMemoryDataProvider<file_sep>import React from 'react'
export default function WelcomeView() {
return <div className="card">
<h2>Welcome to Refractor<span style={{verticalAlign: 'sub', fontSize: '0.57em'}}>beta</span></h2>
<hr/>
<p className="space">
Refractor is a pending transactions storage and multisig aggregator for Stellar Network.
</p>
<p>
Need a place to keep your transaction until all signatures are in place? {' '}
<a href="/tx/add">Store it here</a>, this is entirely free.
</p>
<h3 className="space">How it works</h3>
<hr/>
<p className="space">
It's a developer-focused service in the first place, but anyone can use it to store transactions
and gather signatures required to match the signing threshold.
You can set the network (Stellar public network or testnet), expiration date, custom callback URL.
Any eligible signer can sign the transaction. As soon as it reaches the required threshold (calculated
automatically), the service either submits the transaction to Stellar network or executes a callback.
</p>
<p>
Once uploaded to the server, the transaction cannot be deleted or modified. Other
services and wallets can access and sign it by a standard URL like{' '}
<code>https://api.refractor.stellar.expert/tx/4b50...3dad</code> where its hash serves as a unique
identifier.
Alternatively, a person who uploaded the transaction can share a direct web link
(e.g. <code>https://refractor.stellar.expert/tx/4b50...3dad</code>) pointing to a page where other people
can sign the transaction using a convenient Albedo web interface.
The website shows information about current signing status, suitable signers, and thresholds.
</p>
<p>
Refractor automatically discovers potential signers and computes the thresholds. The signing process is
fully coordinated, signatures aggregation occurs on the server side which in turn allows us to deal with
potentially problematic cases like applying signatures from two different signers concurrently or preventing
handling <code>TX_BAD_AUTH_EXTRA</code> errors in case of too many signatures applied to a transaction.
Refractor ensures that signatures are valid and consistent.
</p>
<h3 className="space">Potential applications</h3>
<hr/>
<p className="space">
This service may come in handy for anyone working on Stellar multisig-based solutions, namely joint
custodial account operators, escrow services, financial derivative contracts trading (features, options),
p2p lending, insurance, etc.
</p>
<p>
Smart contracts on Stellar imply that some pre-signed or partially signed transactions should be
stored by a user for a prolonged period of time.
For obvious reasons, a service provider can’t be regarded as a trustworthy custodian in this situation
(otherwise, it’s not a trustless solution at all). At the same time, storing pre-signed transactions on the
client-side is not an option as well due to the high risk of losing this transaction which in turn may
result in a permanent funds lock.
Refractor with its retention policy may become a convenient third-party storage for all such use-cases.
</p>
<h3 className="space">Notes and Limitations</h3>
<hr/>
<p className="space">
This is a public beta version and it may contain errors. Currently, we still gather feature requests
from developers. API interface, as well as the storage format itself, is subject to changes.
We use redundant storage, so transactions submitted to the system are very unlikely to get lost
but we cannot offer 100% retention guarantees until the API and all formats are finalized.
</p>
<p>
Please note: You shouldn't store transactions containing any potentially sensitive information as anyone
with the link will be able to view transaction details on Refractor. For example, this may lead to
the potential front-running of trades involving derivatives.
</p>
<p>
Currently, Refractor is configured to work in auto-discovery mode only, which means that it automatically
analyses all source accounts in the transaction, detects required signer weights and thresholds.
While this works flawlessly in most scenarios, this also implies that the source account should exist
beforehand.
Otherwise, it's impossible to discover eligible signers and consequently, there's no way to evaluate
signatures validity.
We have a solution for this, but we need more time to test various edge-cases in order to better protect
users from potentially malicious behavior.
</p>
<p>
Although transactions are processed in order of arrival, setting the autosubmit option does not
guarantee that two transactions submitted and signed right after each other will make it to the ledger in a
strictly sequential manner. If you want to make sure that several interdependent transactions are
submitted sequentially, set timebounds on them accordingly.
</p>
<p>
While the project is fully open source, we need some time to polish the code and properly document
everything. So if you want to run a self-hosted Refractor, please stay in touch, we are working
on documentation and full test coverage.
</p>
</div>
}<file_sep>const {standardError} = require('./std-error'),
{networks} = require('../app.config.json')
/**
 * Normalize any supported network alias to the canonical network name.
 * @param {String|Number} network - Alias ('public', 'PUBLIC', 0, 'test', 1, ...).
 * @return {'public'|'testnet'}
 * @throws Standard 400 error for unrecognized aliases.
 */
function normalizeNetworkName(network) {
    //alias lookup table (numeric aliases coerce to their string keys)
    const aliases = {
        'public': 'public',
        'PUBLIC': 'public',
        '0': 'public',
        'testnet': 'testnet',
        'test': 'testnet',
        'TESTNET': 'testnet',
        '1': 'testnet'
    }
    const normalized = aliases[network]
    if (!normalized)
        throw standardError(400, 'Unidentified network: ' + network)
    return normalized
}
/**
*
* @param {String} network
* @return {{horizon: String, network: String, passphrase: String}}
*/
function resolveNetwork(network) {
return networks[normalizeNetworkName(network)]
}
/**
 * Resolve the numeric network id from any supported network alias.
 * @param {String|Number} network
 * @return {Number} 0 for pubnet, 1 for testnet.
 * @throws Standard 400 error for unrecognized aliases (from normalizeNetworkName).
 */
function resolveNetworkId(network) {
    //normalizeNetworkName() yields only 'public'|'testnet' (or throws),
    //so the former '0'/'1'/default branches here were unreachable dead code
    return normalizeNetworkName(network) === 'public' ? 0 : 1
}
//NOTE(review): identical to resolveNetwork() above - kept as a separate export
//for backward compatibility; consider consolidating callers onto one function
function resolveNetworkParams(network) {
    return networks[normalizeNetworkName(network)]
}
module.exports = {resolveNetwork, resolveNetworkId, resolveNetworkParams, normalizeNetworkName}<file_sep>/**
* Convert date to UNIX timestamp format
* @param {Date} [date] - Date object to convert
* @return {Number}
*/
/**
 * Convert a Date to a UNIX timestamp (whole seconds).
 * Defaults to the current moment when no argument is given.
 * @param {Date} [date] - Date object to convert.
 * @return {Number} Seconds since epoch, or undefined for non-Date input.
 */
function getUnixTimestamp(date = undefined) {
    const source = date === undefined ? new Date() : date
    //reject non-Date values (null, numbers, strings) - mirrors the documented contract
    if (!(source instanceof Date)) return undefined
    return Math.floor(source.getTime() / 1000)
}
module.exports = {getUnixTimestamp}<file_sep>import React, {useState} from 'react'
import {Server, TransactionBuilder} from 'stellar-sdk'
import {networks} from '../../app.config.json'
export default function HorizonSubmitTxView({readyToSubmit, submit, submitted, xdr, network}) {
const [inProgress, setInProgress] = useState(false),
[result, setResult] = useState(null),
[error, setError] = useState(null)
function submitTx() {
const {passphrase, horizon} = networks[network],
tx = TransactionBuilder.fromXDR(xdr, passphrase),
server = new Server(horizon)
setInProgress(true)
setError(null)
server.submitTransaction(tx)
.then(() => {
setResult(true)
window.location.reload()
})
.catch(e => {
console.error(e)
let err = 'Transaction failed'
if (e.response.data) {
err += ' ' + JSON.stringify(e.response.data.extras.result_codes)
}
setError(err)
})
.finally(() => {
setInProgress(false)
})
}
if (inProgress) return <div className="loader"/>
return <div className="space">
{readyToSubmit && !submitted ? <>
{!!error && <div className="error">{error}</div>}
{submit ? <p>✓ The transaction is fully signed and will be submitted automatically.</p> :
<div className="row micro-space">
<div className="column column-25">
<button className="button button-block" onClick={() => submitTx()}>Submit</button>
</div>
<div className="column column-75">
<div className="micro-space small dimmed">
Transaction is fully signed and ready to be submitted to the network.
</div>
</div>
</div>}
</> : <>
Transaction is not fully signed yet. More signatures required to match the threshold.
</>}
</div>
}<file_sep>const {TransactionBuilder, FeeBumpTransaction, Keypair} = require('stellar-sdk'),
{inspectTransactionSigners} = require('@stellar-expert/tx-signers-inspector'),
TxModel = require('../models/tx-model'),
TxSignature = require('../models/tx-signature'),
{resolveNetwork, resolveNetworkParams, normalizeNetworkName} = require('./network-resolver'),
{standardError} = require('./std-error'),
{loadAccountsInfo} = require('../storage/core-db-data-source'),
storageLayer = require('../storage/storage-layer'),
{hintMatchesKey, hintToMask} = require('./signature-hint-utils'),
{sliceTx} = require('./tx-params-parser'),
{parseTxParams} = require('./tx-params-parser'),
{getAllSourceAccounts} = require('./tx-helpers'),
{rehydrateTx} = require('./tx-loader')
class Signer {
/**
* @param {Object} request
*/
constructor(request) {
const {xdr, network} = request
let txEnvelope
try {
txEnvelope = TransactionBuilder.fromXDR(xdr, resolveNetwork(network).passphrase)
} catch (e) {
throw standardError(400, `Invalid transaction XDR`)
}
if (txEnvelope instanceof FeeBumpTransaction)
throw standardError(406, `FeeBump transactions not supported`)
const {tx, signatures} = sliceTx(txEnvelope)
this.tx = tx
this.hashRaw = tx.hash()
this.hash = this.hashRaw.toString('hex')
this.signaturesToProcess = signatures
this.txInfo = parseTxParams(tx, request)
this.txInfo.hash = this.hash
this.accepted = []
this.rejected = []
this.status = 'created' //always assume that the tx is new one until we fetched details from db
}
/**
* @type {Transaction}
*/
tx
/**
* @type {String}
*/
hash
/**
* @type {Buffer}
*/
hashRaw
/**
* @type {'draft'|'created'|'updated'|'unchanged'}
*/
status = 'draft'
/**
* @type {TxModel}
*/
txInfo
/**
* @type {Array<TxSignature>}
*/
accepted
/**
* @type {Array<TxSignature>}
*/
rejected
/**
* @type {Array<Object>}
*/
signaturesToProcess
/**
* @type {Array<String>}
*/
potentialSigners
/**
* @type {Object}
*/
schema
async init() {
//check if we have already processed it
let txInfo = await storageLayer.dataProvider.findTransaction(this.hash)
if (txInfo) {
this.txInfo = txInfo //replace tx info with info from db
this.status = 'unchanged'
} else {
this.status = 'created'
}
const {horizon} = resolveNetworkParams(this.txInfo.network)
//find all source accounts participating in the tx
const sourceAccounts = getAllSourceAccounts(this.tx)
//load information about every source account directly from StellarCore database
const sourceAccountsInfo = await loadAccountsInfo(normalizeNetworkName(this.txInfo.network), sourceAccounts)
//discover signers
this.schema = await inspectTransactionSigners(this.tx, {horizon, accountsInfo: sourceAccountsInfo})
//get all signers that can potentially sign the transaction
this.potentialSigners = this.schema.getAllPotentialSigners()
return this
}
get isReady() {
return this.schema.checkFeasibility(this.txInfo.signatures.map(s => s.key))
}
/**
* @param {Object} rawSignature
*/
processSignature(rawSignature) {
//get props from the raw signature
const {hint, signature} = rawSignature._attributes
//init wrapped signature object
const signaturePair = new TxSignature()
signaturePair.signature = signature
//find matching signer from potential signers list
signaturePair.key = this.potentialSigners
.find(key => hintMatchesKey(hint, key) && this.verifySignature(key, signature))
//verify the signature
if (signaturePair.key) {
//filter out duplicates
if (!this.txInfo.signatures.some(s => s.key === signaturePair.key)) {
//add to the valid signatures list
this.txInfo.signatures.push(signaturePair)
this.accepted.push(signaturePair)
}
} else {
signaturePair.key = hintToMask(hint)
this.rejected.push(signaturePair)
}
}
verifySignature(key, signature) {
return Keypair.fromPublicKey(key).verify(this.hashRaw, signature)
}
processNewSignatures() {
if (!this.signaturesToProcess.length) return
//skip existing
const newSignatures = this.signaturesToProcess.filter(sig => {
const newSignature = sig.signature().toString('base64')
return !this.txInfo.signatures.some(existing => existing.signature === newSignature)
})
//search for invalid signature
for (let signature of newSignatures) {
this.processSignature(signature)
}
//save changes if any
if (this.accepted.length && this.status !== 'created') {
this.setStatus('updated')
}
this.signaturesToProcess = []
}
async saveChanges() {
//save changes if any
if (!['created', 'updated'].includes(this.status)) return
if (!this.txInfo.status) {
this.txInfo.status = 'pending'
}
if (this.txInfo.status === 'pending' && this.isReady) {
this.txInfo.status = 'ready'
}
await storageLayer.dataProvider.saveTransaction(this.txInfo)
}
/**
* @param {'draft'|'created'|'updated'|'unchanged'} newStatus
*/
setStatus(newStatus) {
if (this.status === 'created' || this.status === 'updated') return
this.status = newStatus
}
toJSON() {
return {...rehydrateTx(this.txInfo), changes: {accepted: this.accepted, rejected: this.rejected}}
}
}
module.exports = Signer
<file_sep>import {stringifyQuery} from './url-utils'
import {apiEndpoint} from '../app.config.json'
/**
* Retrieve data from the server API endpoint.
* @param {string} relativeApiPath - API endpoint path.
* @param {object} [data] - Request payload.
* @param {object} [params] - Request params.
* @param {'GET'|'POST'|'PUT'|'DELETE'} [params.method] - HTTP method to use (GET by default)..
* @return {Promise<object>}
*/
export function apiCall(relativeApiPath, data, params) {
    //NOTE(review): includeNetwork is set here but never read in this function -
    //for non-GET requests it is spread into fetch options (where fetch ignores it);
    //confirm whether anything actually consumes it
    params = {method: 'GET', includeNetwork: true, ...params}
    let fetchParams = {}
    let url = `${apiEndpoint}/${relativeApiPath}`
    if (params.method && params.method !== 'GET') {
        //non-GET requests carry the payload as a JSON body
        fetchParams = {
            ...params,
            body: JSON.stringify(data),
            headers: {
                'Accept': 'application/json, text/plain, */*',
                'Content-Type': 'application/json'
            }
        }
    } else {
        //GET requests encode the payload into the query string
        url += stringifyQuery(data)
    }
    return fetch(url, fetchParams)
        .then(resp => {
            if (!resp.ok)
                //non-2xx: try to extract the server error payload, fall back to status text
                return resp.json()
                    .catch(e => ({}))
                    .then(ext => {
                        const err = new Error(ext.error || resp.statusText)
                        err.error = resp.statusText
                        err.status = resp.status
                        err.ext = ext
                        return Promise.reject(err)
                    })
            return resp.json()
        })
}<file_sep>class TxModel {
/**
 * Transaction hash - unique identifier of the transaction in storage.
 * @type {String}
 */
hash
/**
 * Network identifier (0 - pubnet, 1 - testnet).
 * @type {Number}
 */
network
/**
 * Transaction XDR without signatures.
 * @type {Buffer}
 */
xdr
/**
 * Signatures applied to the transaction so far.
 * @type {TxSignature[]}
 */
signatures
/**
 * Whether to submit the transaction to the network once fully signed.
 * @type {Boolean}
 */
submit
/**
 * Callback URL where the transaction will be sent once signed.
 * @type {String}
 */
callbackUrl
/**
 * List of signers requested by the tx author.
 * @type {String[]}
 */
desiredSigners
/**
 * Point in time when the transaction becomes valid (populated from tx timebounds).
 * @type {Number} UNIX timestamp
 */
minTime
/**
 * Expiration date - the transaction is considered failed past this point.
 * @type {Number} UNIX timestamp
 */
maxTime
/**
 * Current tx status.
 * @type {TxStatus}
 */
status
/**
 * Timestamp of the moment the transaction was submitted to the network.
 * @type {Number} UNIX timestamp
 */
submitted
}
module.exports = TxModel
/**
* @typedef {'pending'|'ready'|'processing'|'processed'|'failed'} TxStatus
*/<file_sep>const InMemoryDataProvider = require('./inmemory-data-provider'),
path = require('path'),
fs = require('fs').promises
const fileName = path.join(__dirname, '../', 'data.json')
class FsDataProvider extends InMemoryDataProvider {
    /**
     * In-memory representation of the persisted JSON data file.
     * @type {Object}
     */
    storage

    /**
     * Load persisted state from disk, falling back to an empty storage
     * object when the data file has not been created yet.
     */
    async init() {
        let contents
        try {
            contents = await fs.readFile(fileName)
        } catch (e) {
            //any error other than a missing file is unexpected - bubble it up
            if (e.code !== 'ENOENT')
                throw e
            this.storage = {} //first run - no data file on disk yet
            return
        }
        this.storage = JSON.parse(contents.toString())
    }

    /**
     * Persist the current storage snapshot to disk as formatted JSON.
     */
    async save() {
        const serialized = JSON.stringify(this.storage, null, ' ')
        await fs.writeFile(fileName, serialized)
    }
}
module.exports = FsDataProvider
/**
 * Collect the unique set of source accounts referenced by a transaction.
 * @param {Object} tx - Parsed transaction.
 * @return {String[]} Unique source accounts, tx source first.
 */
function getAllSourceAccounts(tx) {
    //fee bump wrapper - only the fee account is a source of the outer envelope
    if (tx.feeSource && tx.innerTransaction) {
        return [tx.feeSource]
    }
    //regular tx - start from the envelope source, then add per-operation sources
    const uniqueSources = new Set([tx.source])
    for (const operation of tx.operations) {
        if (operation.source) {
            uniqueSources.add(operation.source)
        }
    }
    //Set preserves insertion order, so the envelope source stays first
    return Array.from(uniqueSources)
}
module.exports = {getAllSourceAccounts}<file_sep>const createQueue = require('fastq'),
storageLayer = require('../../storage/storage-layer'),
{rehydrateTx} = require('../tx-loader'),
{processCallback} = require('./callback-handler'),
{submitTransaction} = require('./horizon-handler'),
{getUnixTimestamp} = require('../timestamp-utils')
class Finalizer {
    constructor() {
        //bind explicitly - fastq invokes the worker detached from the instance
        this.finalizerQueue = createQueue(this.processTx.bind(this), 50) //max 50 tasks in parallel
    }

    /** Queue of transactions awaiting finalization (callback execution + submission) */
    finalizerQueue

    /** Max number of transactions to buffer in the queue per scheduling pass */
    targetQueueSize = 200

    /** Delay (ms) between scheduling passes */
    tickerTimeout = 5000

    /** Ticker handle: -1 - not started, 0 - stopped, otherwise an active timeout handle */
    processorTimerHandler = -1

    /**
     * Fetch transactions that are ready for finalization, push them to the
     * processing queue, and re-arm the ticker for the next pass.
     */
    async scheduleTransactionsBatch() {
        try {
            const now = getUnixTimestamp()
            //get transactions ready to be submitted
            const cursor = await storageLayer.dataProvider.listTransactions({status: 'ready', minTime: {$lte: now}})
            for await (let txInfo of cursor) {
                if (this.processorTimerHandler === 0) //pipeline stop executed
                    return
                this.finalizerQueue.push(txInfo)
                //the queue length should not exceed the max queue size
                if (this.finalizerQueue.length() >= this.targetQueueSize)
                    break
            }
        } catch (e) {
            console.error(e)
        }
        //bugfix: do not re-arm the ticker if stop() was called while this pass was
        //in flight - otherwise the pipeline would silently resurrect itself after shutdown
        if (this.processorTimerHandler === 0)
            return
        this.processorTimerHandler = setTimeout(() => this.scheduleTransactionsBatch(), this.tickerTimeout) //wait 5 seconds to drain the queue and check for new entries
    }

    /** Adjust how many transactions are processed in parallel. */
    setQueueConcurrency(concurrency) {
        this.finalizerQueue.concurrency = concurrency
    }

    /**
     * Process fully signed tx - execute the callback and/or submit it to the network.
     * @param {TxModel} txInfo - Transaction to finalize.
     * @param {Function} cb - fastq completion callback.
     */
    async processTx(txInfo, cb) {
        if (txInfo.status !== 'ready') return
        try {
            //lock tx
            if (!await storageLayer.dataProvider.updateTxStatus(txInfo.hash, 'processing', 'ready'))
                return //failed to obtain a lock - some other thread is currently processing this transaction
        } catch (e) {
            console.error(e)
            return //invalid current state
        }
        try {
            if (txInfo.maxTime < getUnixTimestamp())
                throw new Error(`Transaction has already expired`)
            const txInfoFull = rehydrateTx(txInfo)
            const update = {status: 'processed'}
            if (txInfo.callbackUrl) {
                await processCallback(txInfoFull)
            }
            if (txInfo.submit) {
                await submitTransaction(txInfoFull)
                update.submitted = getUnixTimestamp()
            }
            if (!await storageLayer.dataProvider.updateTransaction(txInfo.hash, update, 'processing'))
                throw new Error(`State conflict after callback execution`)
        } catch (e) {
            console.error(e)
            //mark as failed so the transaction is not picked up by the scheduler again
            await storageLayer.dataProvider.updateTxStatus(txInfo.hash, 'failed', 'processing')
            cb(e)
            return
        }
        cb(null)
    }

    /** Start the scheduling ticker. */
    start() {
        this.scheduleTransactionsBatch()
            .catch(e => console.error(e))
    }

    /** Stop the ticker, drop queued entries, and wait for in-flight tasks to finish. */
    async stop() {
        clearTimeout(this.processorTimerHandler)
        this.processorTimerHandler = 0 //signals the scheduler not to re-arm itself
        //clear the pending queue
        this.finalizerQueue.kill()
        //wait for all current tasks to finish
        return new Promise(resolve => {
            let finalizerCheckInterval = setInterval(() => {
                if (this.finalizerQueue.idle()) {
                    clearInterval(finalizerCheckInterval)
                    resolve()
                }
            }, 300)
        })
    }

    /** Roll back transactions stuck in 'processing' (e.g. after a crash) to 'ready'. */
    async resetProcessingStatus() {
        const cursor = await storageLayer.dataProvider.listTransactions({status: 'processing'})
        for await (let txInfo of cursor) {
            await storageLayer.dataProvider.updateTxStatus(txInfo.hash, 'ready', 'processing')
        }
    }
}
const finalizer = new Finalizer()
module.exports = finalizer<file_sep>const {Keypair} = require('stellar-sdk')
/**
 * Convert the signature hint to the StrKey mask.
 * @param {Buffer} hint - Hint to convert.
 * @return {string}
 */
function hintToMask(hint) {
    //pad the 4-byte hint to a full 32-byte raw key and encode it to StrKey form
    const paddedKey = Buffer.concat([Buffer.alloc(28), hint])
    const encoded = new Keypair({type: 'ed25519', publicKey: paddedKey}).publicKey()
    //keep the prefix char and the hint-derived chars, mask everything else
    return `${encoded.charAt(0)}${'_'.repeat(46)}${encoded.substr(47, 5)}${'_'.repeat(4)}`
}
/**
 * Format the signature hint to the friendly form for UI.
 * @param {Buffer} hint - Hint to convert.
 * @return {string}
 */
function formatHint(hint) {
    const masked = hintToMask(hint)
    //collapse the long underscore run into an ellipsis for compact display
    return `${masked.substr(0, 2)}…${masked.substr(46)}`
}
/**
 * Check if the hint matches the specific key.
 * @param {Buffer} hint - Hint to check.
 * @param {String} key - Key to compare.
 * @return {boolean}
 */
function hintMatchesKey(hint, key) {
    //only the hint-derived segment of the mask is meaningful for comparison
    const segment = hintToMask(hint).substr(47, 5)
    return segment === key.substr(47, 5)
}
/**
 * Find matching key by the signature hint from a list of available keys.
 * @param {Buffer} hint - Hint to look for.
 * @param {Array<String>} allKeys - Array of potentially matching keys.
 * @return {String|undefined} First key matching the hint, if any.
 */
function findKeysByHint(hint, allKeys) {
    return allKeys.find((candidate) => hintMatchesKey(hint, candidate))
}
/**
 * Find a signature by public key from the list of signatures.
 * NOTE: hints carry only the last 4 bytes of a key, so in the (unlikely) case
 * of a hint collision the first matching signature is returned. Full
 * verification would require the signed payload, which is unavailable here.
 * @param {String} pubkey
 * @param {Array<TxSignature>} allSignatures
 * @returns {TxSignature|undefined}
 */
function findSignatureByKey(pubkey, allSignatures = []) {
    //bugfix: the previous predicate (sig => Keypair.fromPublicKey(pubkey)) ignored
    //the signature entirely and was always truthy - a plain hint match is the
    //only criterion actually applied, without per-element Keypair construction
    return allSignatures.find(sig => hintMatchesKey(sig.hint(), pubkey))
}
module.exports = {
hintToMask,
hintMatchesKey,
formatHint,
findKeysByHint,
findSignatureByKey
}<file_sep>import React from 'react'
import {render} from 'react-dom'
import Router from './router'
import nav, {bindClickNavHandler} from '../infrastructure/nav'
import './styles/styles.scss'
//application bootstrap - build the root container and wire up in-app link navigation
const appContainer = document.createElement('div')
bindClickNavHandler(appContainer)
//render the app into the detached container before attaching it to the document
render(<Router history={nav.history}/>, appContainer)
//remove the static pre-loader placeholder injected by the HTML template
const preLoader = document.getElementById('pre-loader')
preLoader.parentNode.removeChild(preLoader)
document.body.appendChild(appContainer)
"JavaScript"
] | 35 | JavaScript | BangkokCryp/stellar-expert-refractor | 6eb81d74d2178712639f885caaaa3fab6df9adfa | c02b5687d5822c087ca1acdbf46d83ea6bb8921e |
refs/heads/master | <repo_name>robojeeves/PC1.04<file_sep>/README.md
# Patriot Checkpoint V1.04
# content_main.XML
This is the main log-in screen.
# student_advisory_scroll_view.XML
This is the course progress overview display screen
# required_course_list.XML
# completed_course_list.XML
These Screens display the courses required/completed in a scrollable list
# app_bar.XML
Layout for the top navigation bar
<file_sep>/app/src/main/java/com/example/roboj/a_charm/ConnectSQL.java
package com.example.roboj.a_charm;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
/**
* Created by roboj on 11/12/2017.
*/
public class ConnectSQL {
// Student database connection fields
private static final String driver = "com.mysql.jdbc.Driver";
private static final String url = "jdbc:mysql://172.16.58.3:21/teamthree/login.php";//jdbc:mysql://172.16.58.3:21/cosc5384.us
private static final String USER = "<EMAIL>";
private static final String PASS = "<PASSWORD>";
public boolean makeConnection(String email, String password) {
Connection connection = null;
PreparedStatement statement = null;
try { // Try to login
Class.forName(driver);
Connection conn = DriverManager.getConnection(url, USER, PASS);
statement = conn.prepareStatement("SELECT * FROM Student WHERE Email = ? and Password = ?");
statement.setString(1, email);
statement.setString(2, password);
ResultSet rset = statement.executeQuery();
if (rset.next()) {
rset.close();
statement.close();
connection.close();
return true;
} else {
return false;
}
} catch (SQLException se) {
se.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
} finally {
return false;
}
}
}
<file_sep>/app/src/main/java/com/example/roboj/a_charm/DBConnector.java
package com.example.roboj.a_charm;
import android.app.AlertDialog;
import android.content.Context;
import android.content.Intent;
import android.graphics.Color;
import android.os.AsyncTask;
import android.util.Log;
import android.view.View;
import android.widget.TextView;
import android.widget.Toast;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLEncoder;
/**
* Created by lwu on 11/10/2017.
*/
public class DBConnector extends AsyncTask<String,Void,String> {
public static final String LOGIN_FILE = "login.php";
public static final String COMPLETE_FILE = "getcomplete.php";
public static final String DEGREE_PLAN_FILE = "getdegreeplan.php";
public static final String RECORD_DIVIDER = "~RECORD~";
public static final String FIELD_DIVIDER = "~FIELD~";
Context context;
AlertDialog alertDialog;
private String email;
private String password;
private String phpFile;
DBConnector (Context ctx) {
context = ctx;
}
@Override
protected String doInBackground(String... params) {
phpFile = params[0];
String result="";
// DO NOT CHANGE cosc5384.us
// replace yourTeamUsername
String login_url = "http://cosc5384.us/teamthree/"+phpFile;
try {
email = params[1];
password = params[2];
URL url = new URL(login_url);
HttpURLConnection httpURLConnection = (HttpURLConnection)url.openConnection();
httpURLConnection.setRequestMethod("POST");
httpURLConnection.setDoOutput(true);
httpURLConnection.setDoInput(true);
OutputStream outputStream = httpURLConnection.getOutputStream();
BufferedWriter bufferedWriter = new BufferedWriter(new OutputStreamWriter(outputStream, "UTF-8"));
String post_data = URLEncoder.encode("email","UTF-8")+"="+URLEncoder.encode(email,"UTF-8")+"&"
+URLEncoder.encode("password","UTF-8")+"="+URLEncoder.encode(password,"UTF-8");
bufferedWriter.write(post_data);
bufferedWriter.flush();
bufferedWriter.close();
outputStream.close();
InputStream inputStream = httpURLConnection.getInputStream();
BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream,"iso-8859-1"));
String line="";
while((line = bufferedReader.readLine())!= null) {
result += line;
}
bufferedReader.close();
inputStream.close();
httpURLConnection.disconnect();
return result;
} catch (MalformedURLException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
@Override
protected void onPreExecute() {
alertDialog = new AlertDialog.Builder(context).create();
alertDialog.setTitle("Login Status");
}
@Override
protected void onPostExecute(String result) {
alertDialog.setMessage(result);
//alertDialog.show();
//if connection error occurs, throw an error message
// Log.i("abcxyz", result);
if(phpFile.contains(LOGIN_FILE)) {
if (!result.contains("true")) {
Toast toast = Toast.makeText(context, "Login Error", Toast.LENGTH_SHORT);
View view = toast.getView();
view.setBackgroundResource(android.R.drawable.toast_frame);
//Get the TextView for the toast message so you can customize
TextView toastMessage = (TextView) view.findViewById(android.R.id.message);
//Set background color for the text.
toastMessage.setBackgroundColor((Color.parseColor("#646464")));
toast.show();
}
//if connection succeeds, move to your next activity
else {
Toast toast = Toast.makeText(context, "Login Successfully", Toast.LENGTH_SHORT);
View view = toast.getView();
view.setBackgroundResource(android.R.drawable.toast_frame);
//Get the TextView for the toast message so you can customize
TextView toastMessage = (TextView) view.findViewById(android.R.id.message);
//Set background color for the text.
toastMessage.setBackgroundColor((Color.parseColor("#646464")));
toast.show();
Intent intent = new Intent(context, StudentAdvising.class);
intent.putExtra("email", email);
intent.putExtra("password", <PASSWORD>);
context.startActivity(intent);
}
}
}
@Override
protected void onProgressUpdate(Void... values) {
super.onProgressUpdate(values);
}
}
| d3fc882060efbb11ca34db23c60b474013e562c8 | [
"Markdown",
"Java"
] | 3 | Markdown | robojeeves/PC1.04 | 1ec2907835036741b8e0bf65fbaab46b617d6e9c | f4ca2f8e0a249342580a83f2fa5c6bb75a23b271 |
refs/heads/master | <repo_name>NullByte08/LoginAndSignupScreen2<file_sep>/app/src/main/java/com/example/loginandsignupscreen2/MainActivity.java
package com.example.loginandsignupscreen2;
import androidx.appcompat.app.AppCompatActivity;
import androidx.constraintlayout.widget.ConstraintLayout;
import android.animation.Animator;
import android.animation.AnimatorSet;
import android.animation.ObjectAnimator;
import android.os.Bundle;
import android.os.CountDownTimer;
import android.util.TypedValue;
import android.view.View;
import android.view.ViewPropertyAnimator;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;
import com.google.android.material.textfield.TextInputLayout;
public class MainActivity extends AppCompatActivity {
private ImageView logoImage;
private TextInputLayout emailTextInput;
private TextInputLayout passTextInput;
private TextView forPassText;
private Button loginButton;
private TextView signupTextView;
private ConstraintLayout constraintLayout;
@Override
protected void onCreate(Bundle savedInstanceState) {
setTheme(R.style.AppTheme);
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
initializeViews();
constraintLayout.setVisibility(View.GONE);
startAnimation();
}
public void initializeViews(){
constraintLayout=findViewById(R.id.afterAnimationConstraintLayout);
logoImage=findViewById(R.id.logoImage);
emailTextInput=findViewById(R.id.emailInputLayout);
passTextInput=findViewById(R.id.passwordInputLayout);
forPassText=findViewById(R.id.forPassTextView);
loginButton=findViewById(R.id.loginButton);
signupTextView=findViewById(R.id.signupTextView);
}
public void startAnimation(){
ObjectAnimator scaleDownX = ObjectAnimator.ofFloat(logoImage, "scaleX", 0.45f);
ObjectAnimator scaleDownY = ObjectAnimator.ofFloat(logoImage, "scaleY", 0.45f);
scaleDownX.setDuration(1000);
scaleDownY.setDuration(1000);
ObjectAnimator moveUpY = ObjectAnimator.ofFloat(logoImage, "translationY", -400);
moveUpY.setDuration(800);
AnimatorSet scaleDown = new AnimatorSet();
final AnimatorSet moveUp = new AnimatorSet();
scaleDown.play(scaleDownX).with(scaleDownY);
moveUp.play(moveUpY);
scaleDown.start();
new CountDownTimer(200,100){
@Override
public void onFinish() {
moveUp.start();
}
@Override
public void onTick(long l) {
}
}.start();
scaleDown.addListener(new Animator.AnimatorListener() {
@Override
public void onAnimationStart(Animator animator) {
}
@Override
public void onAnimationEnd(Animator animator) {
constraintLayout.setVisibility(View.VISIBLE);
}
@Override
public void onAnimationCancel(Animator animator) {
}
@Override
public void onAnimationRepeat(Animator animator) {
}
});
/*ViewPropertyAnimator viewPropertyAnimator=logoImage.animate();
float seventysix= TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP,76,this.getResources().getDisplayMetrics());
float onefiftyfive= TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP,155,this.getResources().getDisplayMetrics());
float hundred= TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP,100,this.getResources().getDisplayMetrics());
viewPropertyAnimator.translationY(76);
viewPropertyAnimator.translationX(155);
viewPropertyAnimator.setDuration(10000);
viewPropertyAnimator.scaleX(100);
viewPropertyAnimator.scaleY(-hundred);
viewPropertyAnimator.setListener(new Animator.AnimatorListener() {
@Override
public void onAnimationStart(Animator animator) {
}
@Override
public void onAnimationEnd(Animator animator) {
constraintLayout.setVisibility(View.VISIBLE);
}
@Override
public void onAnimationCancel(Animator animator) {
}
@Override
public void onAnimationRepeat(Animator animator) {
}
});*/
}
}
| a04393bb967e48e013f658ccea71666fc1129617 | [
"Java"
] | 1 | Java | NullByte08/LoginAndSignupScreen2 | 57e0a6a899d52ffa1d3cdb5eb199b55d15c2c328 | 553593e6b0990f13494c44555786a22aabc04c85 |
refs/heads/main | <repo_name>MrXujiang/xu-pro-lerna<file_sep>/scripts/gen_less_entry.js
const fs = require('fs');
const { join } = require('path');
const fg = require('fast-glob');
// 用于转换 Windows 反斜杠路径转换为正斜杠路径 \ => /
const slash = require('slash');
const pkgList = fs
.readdirSync(join(__dirname, '../', 'packages'))
.filter((pkg) => pkg.charAt(0) !== '.');
pkgList.map(async (path) => {
const baseUrl = slash(`${join(__dirname, '../', 'packages')}/${path}/src`);
const lessFiles = await fg(`${baseUrl}/**/*.less`, {
ignore: ['**/demos/**'],
deep: 5,
});
const importFiles = lessFiles.map((lessPath) => {
return `@import "../es${lessPath.replace(baseUrl, '')}";`;
});
const distPath = slash(`${join(__dirname, '../', 'packages', path, 'dist', `${path}.less`)}`);
// console.log(11, distPath, importFiles)
fs.writeFileSync(distPath, importFiles.join('\n'));
});
<file_sep>/packages/utils/src/typing.ts
import type { FormInstance, FormItemProps } from 'antd/lib/form';
import type { LabelTooltipType } from 'antd/lib/form/FormItemLabel';
import type { NamePath } from 'antd/lib/form/interface';
import type { Moment } from 'moment';
import type { ReactNode } from 'react';
import type { UseEditableUtilType } from './useEditableArray';
export type PageInfo = {
pageSize: number;
total: number;
current: number;
};
/**
* @param textarea 文本框
* @param password 密码框
* @param money 金额 option 操作 需要返回一个数组
* @param date 日期 YYYY-MM-DD
* @param dateWeek 周选择器
* @param dateMonth 月选择器
* @param dateQuarter 季度选择器
* @param dateYear 年选择器
* @param dateRange 日期范围 YYYY-MM-DD[]
* @param dateTime 日期和时间 YYYY-MM-DD HH:mm:ss
* @param dateTimeRange 范围日期和时间 YYYY-MM-DD HH:mm:ss[]
* @param time: 时间 HH:mm:ss
* @param timeRange: 时间区间 HH:mm:ss[]
* @param index:序列
* @param indexBorder:序列
* @param progress: 进度条
* @param percent: 百分比
* @param digit 数值
* @param second 秒速
* @param fromNow 相对于当前时间
* @param avatar 头像
* @param code 代码块
* @param image 图片设置
* @param jsonCode Json 的代码块,格式化了一下
* @param color 颜色选择器
*/
export type ProFieldValueType =
| 'text'
| 'password'
| 'money'
| 'textarea'
| 'option'
| 'date'
| 'dateWeek'
| 'dateMonth'
| 'dateQuarter'
| 'dateYear'
| 'dateRange'
| 'dateTimeRange'
| 'dateTime'
| 'time'
| 'timeRange'
| 'select'
| 'checkbox'
| 'rate'
| 'radio'
| 'radioButton'
| 'index'
| 'indexBorder'
| 'progress'
| 'percent'
| 'digit'
| 'second'
| 'avatar'
| 'code'
| 'switch'
| 'fromNow'
| 'image'
| 'jsonCode'
| 'color';
export type RequestOptionsType = {
label?: React.ReactNode;
value?: React.ReactText;
/** 渲染的节点类型 */
optionType?: 'optGroup' | 'option';
options?: Omit<RequestOptionsType, 'children' | 'optionType'>[];
[key: string]: any;
};
export type ProFieldRequestData<U = any> = (params: U, props: any) => Promise<RequestOptionsType[]>;
export type ProFieldValueEnumType = ProSchemaValueEnumMap | ProSchemaValueEnumObj;
// function return type
export type ProFieldValueObjectType = {
type: 'progress' | 'money' | 'percent' | 'image';
status?: 'normal' | 'active' | 'success' | 'exception' | undefined;
locale?: string;
/** Percent */
showSymbol?: ((value: any) => boolean) | boolean;
showColor?: boolean;
precision?: number;
moneySymbol?: boolean;
request?: ProFieldRequestData;
/** Image */
width?: number;
};
export type ProSchemaValueEnumType = {
/** @name 演示的文案 */
text: ReactNode;
/** @name 预定的颜色 */
status: string;
/** @name 自定义的颜色 */
color?: string;
/** @name 是否禁用 */
disabled?: boolean;
};
/**
* 支持 Map 和 Record<string,any>
*
* @name ValueEnum 的类型
*/
export type ProSchemaValueEnumMap = Map<React.ReactText, ProSchemaValueEnumType | ReactNode>;
export type ProSchemaValueEnumObj = Record<string, ProSchemaValueEnumType | ReactNode>;
export type ProFieldTextType = React.ReactNode | React.ReactNode[] | Moment | Moment[];
export type SearchTransformKeyFn = (
value: any,
field: string,
object: any,
) => string | Record<string, any>;
export type ProTableEditableFnType<T> = (_: any, record: T, index: number) => boolean;
// 支持的变形,还未完全支持完毕
/** 支持的变形,还未完全支持完毕 */
export type ProSchemaComponentTypes =
| 'form'
| 'list'
| 'descriptions'
| 'table'
| 'cardList'
| undefined;
/** 操作类型 */
export type ProCoreActionType<T = {}> = {
/** @name 刷新 */
reload: (resetPageIndex?: boolean) => Promise<void>;
/** @name 刷新并清空,只清空页面,不包括表单 */
reloadAndRest?: () => Promise<void>;
/** @name 重置任何输入项,包括表单 */
reset?: () => void;
/** @name 清空选择 */
clearSelected?: () => void;
/** @name p页面的信息都在里面 */
pageInfo?: PageInfo;
} & Omit<
UseEditableUtilType,
'newLineRecord' | 'editableKeys' | 'actionRender' | 'setEditableRowKeys'
> &
T;
type ProSchemaValueType<ValueType> = (ValueType | ProFieldValueType) | ProFieldValueObjectType;
/** 各个组件公共支持的 render */
export type ProSchema<
Entity = Record<string, any>,
ExtraProps = unknown,
ComponentsType = ProSchemaComponentTypes,
ValueType = 'text',
> = {
/** @name 确定这个列的唯一值,一般用于 dataIndex 重复的情况 */
key?: React.Key;
/**
* 支持一个数字,[a,b] 会转化为 obj.a.b
*
* @name 与实体映射的key
*/
dataIndex?: string | number | (string | number)[];
/** 选择如何渲染相应的模式 */
valueType?:
| ((entity: Entity, type: ComponentsType) => ProSchemaValueType<ValueType>)
| ProSchemaValueType<ValueType>;
/**
* 支持 ReactNode 和 方法
*
* @name 标题
*/
title?:
| ((
schema: ProSchema<Entity, ExtraProps>,
type: ComponentsType,
dom: React.ReactNode,
) => React.ReactNode)
| React.ReactNode;
/** @name 展示一个 icon,hover 是展示一些提示信息 */
tooltip?: LabelTooltipType | string;
/** @deprecated 你可以使用 tooltip,这个更改是为了与 antd 统一 */
tip?: string;
/**
* 支持 object 和Map,Map 是支持其他基础类型作为 key
*
* @name 映射值的类型
*/
valueEnum?:
| ((row: Entity) => ProSchemaValueEnumObj | ProSchemaValueEnumMap)
| ProSchemaValueEnumObj
| ProSchemaValueEnumMap;
/** 自定义的 fieldProps render */
fieldProps?:
| ((
form: FormInstance<any>,
config: ProSchema<Entity, ExtraProps> & {
type: ComponentsType;
isEditable?: boolean;
rowKey?: string;
rowIndex: number;
entity: Entity;
},
) => Record<string, any>)
| Record<string, any>
| {
placeholder?: string;
maxLength?: number;
[key: string]: any;
};
/** @name 自定义的 formItemProps */
formItemProps?:
| FormItemProps
| ((
form: FormInstance<any>,
config: ProSchema<Entity, ExtraProps> & {
type: ComponentsType;
isEditable?: boolean;
rowKey?: string;
rowIndex: number;
entity: Entity;
},
) => FormItemProps);
/**
* 修改的数据是会被 valueType 消费
*
* @name 自定义 render 内容
*/
renderText?: (text: any, record: Entity, index: number, action: ProCoreActionType) => any;
/**
* Render 方法只管理的只读模式,编辑模式需要使用 renderFormItem
*
* @name 自定义只读模式的dom
*/
render?: (
dom: React.ReactNode,
entity: Entity,
index: number,
action: ProCoreActionType | undefined,
schema: ProSchema<Entity, ExtraProps, ComponentsType, ValueType> & {
isEditable?: boolean;
type: ComponentsType;
},
) => React.ReactNode;
/**
* 返回一个 ReactNode,会自动包裹 value 和 onChange
*
* @name 自定义编辑模式
*/
renderFormItem?: (
schema: ProSchema<Entity, ExtraProps, ComponentsType, ValueType> & {
isEditable?: boolean;
index?: number;
type: ComponentsType;
originProps?: any;
},
config: {
onSelect?: (value: any) => void;
onChange?: <T = any>(value: T) => void;
value?: any;
type: ComponentsType;
recordKey?: React.Key | React.Key[];
record?: Entity;
isEditable?: boolean;
defaultRender: (
newItem: ProSchema<Entity, ExtraProps, ComponentsType, ValueType>,
) => JSX.Element | null;
},
form: FormInstance,
) => React.ReactNode;
/** 可编辑表格是否可编辑 */
editable?: false | ProTableEditableFnType<Entity>;
/** @name 从服务器请求枚举 */
request?: ProFieldRequestData;
/** @name 从服务器请求的参数,改变了会触发 reload */
params?: Record<string, any>;
/** @name 依赖字段的name,暂时只在拥有 request 的项目中生效,会自动注入到 params 中 */
dependencies?: NamePath[];
/** @name 在 descriptions 隐藏 */
hideInDescriptions?: boolean;
/** @name 在 Form 中隐藏 */
hideInForm?: boolean;
/** @name 在 table 中隐藏 */
hideInTable?: boolean;
/** @name 在 table的查询表单 中隐藏 */
hideInSearch?: boolean;
/** 设置到 ProField 上面的 Props,内部属性 */
proFieldProps?: ProFieldProps;
} & ExtraProps;
export interface ProFieldProps {
light?: boolean;
emptyText?: ReactNode;
label?: React.ReactNode;
mode?: 'read' | 'edit';
/** 这个属性可以设置useSwr的key */
proFieldKey?: string;
render?: any;
}
<file_sep>/docs/index.en-US.md
---
title: ProComponents - Page level front-end components
order: 10
sidebar: false
hero:
title: ProComponents
desc: 🏆 Make middle and backstage development easier
actions:
- text: 🥳 quick-start →
link: /en-US/docs/getting-started
features:
- icon: https://gw.alipayobjects.com/os/q/cms/images/k9ziitmp/13668549-b393-42a2-97c3-a6365ba87ac2_w96_h96.png
title: Easy to use
desc: Wrapped in Ant Design to make it easier to use
- icon: https://gw.alipayobjects.com/os/q/cms/images/k9ziik0f/487a2685-8f68-4c34-824f-e34c171d0dfd_w96_h96.png
title: Ant Design
desc: The same design system as Ant Design, seamlessly connects to antd project
- icon: https://gw.alipayobjects.com/os/q/cms/images/k9ziip85/89434dcf-5f1d-4362-9ce0-ab8012a85924_w96_h96.png
title: Internationalization
desc: Provides complete internationalization language support, and connects to the Ant Design system
- icon: https://gw.alipayobjects.com/mdn/rms_05efff/afts/img/A*-3XMTrwP85wAAAAAAAAAAAAABkARQnAQ
title: preset style
desc: The style is the same as antd, no need to change it, it's a natural fit
- icon: https://gw.alipayobjects.com/os/q/cms/images/k9ziieuq/decadf3f-b53a-4c48-83f3-a2faaccf9ff7_w96_h96.png
title: preset behavior
desc: Less code, less bugs
- icon: https://gw.alipayobjects.com/os/q/cms/images/k9zij2bh/67f75d56-0d62-47d6-a8a5-dbd0cb79a401_w96_h96.png
title: TypeScript
desc: Development with TypeScript, complete with type definition files
footer: Open-source MIT Licensed | © 2017-present
---
## Component Board
| Components | Downloads | Versions |
| --- | --- | --- |
| pro-layout | [](https://www.npmjs.com/package/@ant-design/pro-layout) | [](https://www.npmjs.com/package/@ant-design/pro-layout) |
| pro-table | [](https://www.npmjs.com/package/@ant-design/pro-table) | [](https://www.npmjs.com/package/@ant-design/pro-table) |
| pro-field | [](https://www.npmjs.com/package/@ant-design/pro-field) | [](https://www.npmjs.com/package/@ant-design/pro-field) |
| pro-form | [](https://www.npmjs.com/package/@ant-design/pro-form) | [](https://www.npmjs.com/package/@ant-design/pro-form) |
| pro-skeleton | [](https://www.npmjs.com/package/@ant-design/pro-skeleton) | [](https://www.npmjs.com/package/@ant-design/pro-skeleton) |
| pro-list | [](https://www.npmjs.com/package/@ant-design/pro-list) | [](https://www.npmjs.com/package/@ant-design/pro-list) |
| pro-card | [](https://www.npmjs.com/package/@ant-design/pro-card) | [](https://www.npmjs.com/package/@ant-design/pro-card) |
| pro-descriptions | [](https://www.npmjs.com/package/@ant-design/pro-descriptions) | [](https://www.npmjs.com/package/@ant-design/pro-descriptions) |
## 🖥 Browser Compatibility
- Modern browsers and Internet Explorer 11 (with [polyfills](https://stackoverflow.com/questions/57020976/polyfills-in-2019-for-ie11))
- [Electron](https://www.electronjs.org/)
| [](http://godban.github.io/browsers-support-badges/) | [](http://godban.github.io/browsers-support-badges/) | [](http://godban.github.io/browsers-support-badges/) | [](http://godban.github.io/browsers-support-badges/) | [](http://godban.github.io/browsers-support-badges/) |
| --- | --- | --- | --- | --- |
| IE11, Edge | last 2 versions | last 2 versions | last 2 versions | last 2 versions |
<file_sep>/packages/utils/src/merge/index.ts
/* eslint-disable prefer-rest-params */
const merge = <T>(...rest: any[]): T => {
const obj = {};
const il = rest.length;
let key;
let i = 0;
for (; i < il; i += 1) {
// eslint-disable-next-line no-restricted-syntax
for (key in rest[i]) {
if (rest[i].hasOwnProperty(key)) {
obj[key] = rest[i][key];
}
}
}
return obj as T;
};
export { merge };
<file_sep>/docs/components.md
---
title: 组件总览
order: 0
group:
path: /
nav:
title: 组件
path: /components
---
# 架构设计
ProComponents 是基于 Ant Design 而开发的模板组件,提供了更高级别的抽象支持,开箱即用。可以显著的提升制作 CRUD 页面的效率,更加专注于页面。
- [ProSkeleton](/components/skeleton) 页面级别的骨架屏
> 如果您是阿里内网用户,欢迎尝试使用 [TechUI](https://techui.alipay.com)。TechUI 在封装 ProComponents 的基础上还提供了丰富的 Ant Design 扩展组件。
<file_sep>/jest.config.js
const { readdirSync } = require('fs');
const { join } = require('path');
const pkgList = readdirSync(join(__dirname, './packages')).filter((pkg) => pkg.charAt(0) !== '.');
const moduleNameMapper = {};
pkgList.forEach((shortName) => {
const name = `@ant-design/pro-${shortName}`;
moduleNameMapper[name] = join(__dirname, `./packages/${shortName}/src`);
});
module.exports = {
collectCoverageFrom: [
'packages/**/src/**/*.{ts,tsx}',
'!packages/**/src/demos/**',
'!packages/**/src/**/demos/**',
'!packages/utils/src/isDeepEqualReact/*.{ts,tsx}',
],
moduleNameMapper,
testURL:
'http://localhost?navTheme=realDark&layout=mix&primaryColor=daybreak&splitMenus=false&fixedHeader=true',
verbose: true,
snapshotSerializers: [require.resolve('enzyme-to-json/serializer')],
extraSetupFiles: ['./tests/setupTests.js'],
globals: {
ANT_DESIGN_PRO_ONLY_DO_NOT_USE_IN_YOUR_PRODUCTION: false,
},
};
<file_sep>/packages/utils/src/omitBoolean/index.ts
const omitBoolean = <T>(obj: boolean | T): T | undefined => {
if (obj && obj !== true) {
return obj;
}
return undefined;
};
export default omitBoolean;
<file_sep>/packages/utils/src/isBrowser/index.ts
const isNode =
typeof process !== 'undefined' && process.versions != null && process.versions.node != null;
const isBrowser = () => {
if (process.env.NODE_ENV === 'TEST') {
return true;
}
return typeof window !== 'undefined' && typeof window.document !== 'undefined' && !isNode;
};
export default isBrowser;
<file_sep>/packages/list/src/constants.ts
export const PRO_LIST_KEYS = [
'title',
'subTitle',
'avatar',
'description',
'extra',
'content',
'actions',
'type',
];
<file_sep>/docs/components.en-US.md
---
title: Component Overview
order: 0
group:
path: /
nav:
title: Component
path: /components
---
# Architecture Design
ProComponents was developed to reduce the cost of implementing CRUD in the middle and backend, with the idea of reducing the necessary state maintenance and focusing more on the business.
- [ProSkeleton](/components/skeleton) Page level skeleton screen<file_sep>/README.zh-CN.md
[English](./README.en-US.md) | 简体中文 | [Español](./README.es-PR.md)
# ProComponents
这里放置了 pro 系列组件,用于支撑重型组件
   [](https://codecov.io/gh/ant-design/pro-components) [](https://d.umijs.org/) [](https://github.com/umijs/father/) [](https://ant.design) [](https://lerna.js.org/)
## 🖥 浏览器兼容性
- 现代浏览器和 Internet Explorer 11 (with [polyfills](https://stackoverflow.com/questions/57020976/polyfills-in-2019-for-ie11))
- [Electron](https://www.electronjs.org/)
| [](http://godban.github.io/browsers-support-badges/) | [](http://godban.github.io/browsers-support-badges/) | [](http://godban.github.io/browsers-support-badges/) | [](http://godban.github.io/browsers-support-badges/) | [](http://godban.github.io/browsers-support-badges/) |
| --- | --- | --- | --- | --- |
| IE11, Edge | last 2 versions | last 2 versions | last 2 versions | last 2 versions |
## 参与贡献
我们非常欢迎你的贡献,你可以通过以下方式和我们一起共建 😃 :
- 在你的公司或个人项目中使用 Ant Design Pro,umi 和 ProComponents。
- 通过 [Issue](https://github.com/ant-design/pro-components/issues) 报告 bug 或进行咨询。
- 提交 [Pull Request](https://github.com/ant-design/pro-components/pulls) 改进 ProComponents 的代码。
## 组件看板
| 组件 | 下载量 | 版本 |
| --- | --- | --- |
| pro-layout | [](https://www.npmjs.com/package/@ant-design/pro-layout) | [](https://www.npmjs.com/package/@ant-design/pro-layout) |
| pro-table | [](https://www.npmjs.com/package/@ant-design/pro-table) | [](https://www.npmjs.com/package/@ant-design/pro-table) |
| pro-field | [](https://www.npmjs.com/package/@ant-design/pro-field) | [](https://www.npmjs.com/package/@ant-design/pro-field) |
| pro-form | [](https://www.npmjs.com/package/@ant-design/pro-form) | [](https://www.npmjs.com/package/@ant-design/pro-form) |
| pro-skeleton | [](https://www.npmjs.com/package/@ant-design/pro-skeleton) | [](https://www.npmjs.com/package/@ant-design/pro-skeleton) |
| pro-list | [](https://www.npmjs.com/package/@ant-design/pro-list) | [](https://www.npmjs.com/package/@ant-design/pro-list) |
| pro-card | [](https://www.npmjs.com/package/@ant-design/pro-card) | [](https://www.npmjs.com/package/@ant-design/pro-card) |
| pro-descriptions | [](https://www.npmjs.com/package/@ant-design/pro-descriptions) | [](https://www.npmjs.com/package/@ant-design/pro-descriptions) |
## LICENSE
MIT
<file_sep>/docs/faq.md
---
title: FAQ
order: 3
group:
path: /
nav:
title: FAQ
path: /docs
---
## FAQ
以下整理了一些 ProComponents 社区常见的问题和官方答复,在提问之前建议找找有没有类似的问题。此外我们也维护了一个反馈较多 [how to use 标签](https://github.com/ant-design/pro-components/issues?q=is%3Aissue+label%3A%22%F0%9F%A4%B7%F0%9F%8F%BC+How+to+use%22+) 亦可参考。
### ProTable request 返回的数据格式可以自定义吗?
不行的,你可以在 request 中转化一下,或者写个拦截器。
[示例](https://beta-pro.ant.design/docs/request-cn)
### 如何隐藏 ProTable 生成的搜索的 label?
columns 的 title 支持 function 的,你可以这样写
```typescript
title: (_, type) => {
if (type === 'table') {
return '标题';
}
return null;
};
```
### 我没法安装 `ProComponents` 和 `ProComponents` 的依赖,顺便提一句,我在中国大陆。
那啥,试试 [cnpm](http://npm.taobao.org/)和[yarn](https://www.npmjs.com/package/yarn)。
### `Form` 当中 `initialValues`
`ProComponents` 底层也是封装的 [antd](https://ant.design/index-cn) ,所以用法也是和 [antd](https://ant.design/index-cn) 相同。注意 `initialValues` 不能被 `setState` 动态更新,你需要用 `setFieldsValue` 来更新。 `initialValues` 只在 `form` 初始化时生效且只生效一次,如果你需要异步加载推荐使用 `request`,或者 `initialValues ? <Form/> : null`
## 错误和警告
这里是一些你在使用 ProComponents 的过程中可能会遇到的错误和警告,但是其中一些并不是 ProComponents 的 bug。
### Cannot read property 'Provider' of undefined
请确保 antd 的版本 >= `4.11.1`
<file_sep>/packages/utils/src/parseValueToMoment/index.ts
import moment from 'moment';
import isNil from '../isNil';
type DateValue = moment.Moment | moment.Moment[] | string | string[] | number | number[];
const parseValueToMoment = (
value: DateValue,
formatter?: string,
): moment.Moment | moment.Moment[] | null | undefined => {
if (isNil(value) || moment.isMoment(value)) {
return value as moment.Moment | null | undefined;
}
if (Array.isArray(value)) {
return (value as any[]).map((v) => parseValueToMoment(v, formatter) as moment.Moment);
}
return moment(value, formatter);
};
export default parseValueToMoment;
<file_sep>/docs/pro-list.changelog.md
---
title: ProList - 更新日志
nav:
title: 更新日志
path: /changelog
group:
path: /
---
# Change Log
## @ant-design/pro-list@1.14.1
`2021-09-17`
- 🐛 fix(list): title blank placeholder of card [#3680](https://github.com/ant-design/pro-components/pull/3680) [@liuxulian](https://github.com/liuxulian)
## @ant-design/pro-list@1.14.0
`2021-09-16`
- 💥 feat(list): card list support selected [#3666](https://github.com/ant-design/pro-components/pull/3666) [@liuxulian](https://github.com/liuxulian)
## @ant-design/pro-list@1.11.0
`2021-08-03`
- 💥 feat(list): ProList add base proList [#3361](https://github.com/ant-design/pro-components/pull/3361) [@chenshuai2144](https://github.com/chenshuai2144)
## @ant-design/pro-list@1.10.11
`2021-07-30`
- 🐛 fix(list): better snapshot update style [#3338](https://github.com/ant-design/pro-components/pull/3338) [@chenshuai2144](https://github.com/chenshuai2144)
## @ant-design/pro-list@1.10.9
`2021-07-21`
## @ant-design/pro-list@1.10.8
`2021-07-20`
- 🐛 fix(list): fix form context no render error [#3274](https://github.com/ant-design/pro-components/pull/3274) [@chenshuai2144](https://github.com/chenshuai2144)
## @ant-design/pro-list@1.10.7
`2021-07-19`
- 🎨 chore(list): update depend rc-resize-observer [#3206](https://github.com/ant-design/pro-components/pull/3206) [@DerrickTel](https://github.com/DerrickTel)
## @ant-design/pro-list@1.10.2
`2021-06-23`
- 🐛 fix(prolist): title&description wordBreak [#3059](https://github.com/ant-design/pro-components/pull/3059) [@binvb](https://github.com/binvb)
- 🐛 fix(list):fix itemTitleRender no work error [#3069](https://github.com/ant-design/pro-components/pull/3069) [@chenshuai2144](https://github.com/chenshuai2144)
## @ant-design/pro-list@1.8.0
`2021-05-25`
- 💥 feat(list): prolist support itemHeaderRender [#2844](https://github.com/ant-design/pro-components/pull/2844) [@chenshuai2144](https://github.com/chenshuai2144)
- 💥 feat(list): list support locale(#2828) [#2840](https://github.com/ant-design/pro-components/pull/2840) [@DerrickTel](https://github.com/DerrickTel)
## @ant-design/pro-list@1.7.4
`2021-05-18`
- 🐛 fix(list): add editable props types [#2758](https://github.com/ant-design/pro-components/pull/2758) [@chenshuai2144](https://github.com/chenshuai2144)
## @ant-design/pro-list@1.7.0
`2021-04-22`
- 💥 feat(list): support onRow [#2543](https://github.com/ant-design/pro-components/pull/2543) [@DerrickTel](https://github.com/DerrickTel)
## @ant-design/pro-list@1.6.0
`2021-04-15`
- 🐛 fix(form): fix ProFormUploadButton listType=picture-card style error [#2483](https://github.com/ant-design/pro-components/pull/2483) [@chenshuai2144](https://github.com/chenshuai2144)
## @ant-design/pro-list@1.5.11
`2021-03-29`
- 🐛 fix(list): fix list no use dataIndex error [#2298](https://github.com/ant-design/pro-components/pull/2298) [@chenshuai2144](https://github.com/chenshuai2144)
## @ant-design/pro-list@1.5.6
`2021-03-11`
- ✨ feat(list): support showExtra(#2090) [#2145](https://github.com/ant-design/pro-components/pull/2145) [@DerrickTel](https://github.com/DerrickTel)
## @ant-design/pro-list@1.5.5
`2021-03-08`
- 🐛 fix(form): fix list key no had error [#2122](https://github.com/ant-design/pro-components/pull/2122) [@chenshuai2144](https://github.com/chenshuai2144)
- 🐛 fix(list): fix nested list copy no work error [#2117](https://github.com/ant-design/pro-components/pull/2117) [@chenshuai2144](https://github.com/chenshuai2144)
- 🐛 fix(form): fix form list copy no work error [#2081](https://github.com/ant-design/pro-components/pull/2081) [@chenshuai2144](https://github.com/chenshuai2144)
## @ant-design/pro-list@1.5.4
`2021-02-28`
- 🐛 fix(list): default close rowSelection [#2032](https://github.com/ant-design/pro-components/pull/2032) [@chenshuai2144](https://github.com/chenshuai2144)
## @ant-design/pro-list@1.5.3
`2021-02-25`
- 🐛 fix(list): list selectedRow [#1967](https://github.com/ant-design/pro-components/pull/1967) [@xiaohuoni](https://github.com/xiaohuoni)
## @ant-design/pro-list@1.5.1
`2021-02-22`
- 💥 feat(form): support form list [#1908](https://github.com/ant-design/pro-components/pull/1908) [@chenshuai2144](https://github.com/chenshuai2144)
## @ant-design/pro-list@1.5.0
`2021-02-04`
- 🐛 fix(list): fix list item no key error [#1836](https://github.com/ant-design/pro-components/pull/1836) [@chenshuai2144](https://github.com/chenshuai2144)
- 💥 feat(list): ProList support renderItem [#1824](https://github.com/ant-design/pro-components/pull/1824) [@chenshuai2144](https://github.com/chenshuai2144)
## @ant-design/pro-list@1.4.0
`2021-02-02`
- 💥 feat(list): proList support checkox alert [#1821](https://github.com/ant-design/pro-components/pull/1821) [@chenshuai2144](https://github.com/chenshuai2144)
- ✨ feat(list): support expandIcon(#1710) [#1771](https://github.com/ant-design/pro-components/pull/1771) [@DerrickTel](https://github.com/DerrickTel)
## @ant-design/pro-list@1.3.0
`2021-01-18`
- 🐛 fix(list): fix card list no support Avatar error [#1663](https://github.com/ant-design/pro-components/pull/1663) [@chenshuai2144](https://github.com/chenshuai2144)
## @ant-design/pro-list@1.2.18
`2021-01-12`
- 🐛 fix(list): A large number of stylesyou [#1585](https://github.com/ant-design/pro-components/pull/1585) [@chenshuai2144](https://github.com/chenshuai2144)
## @ant-design/pro-list@1.2.15
`2021-01-08`
- 🐛 fix(list): the action of a list is no longer bubbled [#1543](https://github.com/ant-design/pro-components/pull/1543) [@chenshuai2144](https://github.com/chenshuai2144)
## @ant-design/pro-list@1.2.10
`2021-01-05`
- 🐛 fix(list): fix editable for list [#1481](https://github.com/ant-design/pro-components/pull/1481) [@chenshuai2144](https://github.com/chenshuai2144)
## @ant-design/pro-list@1.2.0
`2020-12-24`
- 🐛 fix(list): fix list pagination no work error [#1393](https://github.com/ant-design/pro-components/pull/1393) [@chenshuai2144](https://github.com/chenshuai2144)
## @ant-design/pro-list@1.1.6
`2020-12-07`
- 🐛 fix(list): actions support react node [#1200](https://github.com/ant-design/pro-components/pull/1200) [@chenshuai2144](https://github.com/chenshuai2144)
- 🐛 fix(list): if title is null, remove header dom [#1191](https://github.com/ant-design/pro-components/pull/1191) [@chenshuai2144](https://github.com/chenshuai2144)
## @ant-design/pro-list@0.0.8
`2020-08-14`
- 🎨 style: list selected 时 hover 样式 ([@WynterDing](https://github.com/WynterDing))[#178](https://github.com/ant-design/pro-components/pull/178)
<file_sep>/packages/utils/src/hooks/useDeepCompareEffect/index.ts
import type { DependencyList } from 'react';
import { useEffect, useRef } from 'react';
import isDeepEqualReact from '../../isDeepEqualReact';
export const isDeepEqual: (a: any, b: any) => boolean = isDeepEqualReact;
function useDeepCompareMemoize(value: any) {
const ref = useRef();
// it can be done by using useMemo as well
// but useRef is rather cleaner and easier
if (!isDeepEqual(value, ref.current)) {
ref.current = value;
}
return ref.current;
}
function useDeepCompareEffect(effect: React.EffectCallback, dependencies: DependencyList = []) {
useEffect(effect, useDeepCompareMemoize(dependencies));
}
export default useDeepCompareEffect;
<file_sep>/packages/utils/src/hooks/useDebounceFn/index.ts
import type { DependencyList } from 'react';
import { useEffect, useRef, useCallback } from 'react';
export type ReturnValue<T extends any[]> = {
run: (...args: T) => void;
cancel: () => void;
};
const useUpdateEffect: typeof useEffect = (effect, deps) => {
const isMounted = useRef(false);
useEffect(() => {
if (!isMounted.current) {
isMounted.current = true;
} else {
return effect();
}
return () => undefined;
}, deps);
};
function useDebounceFn<T extends any[]>(
fn: (...args: T) => Promise<any>,
deps: DependencyList | number,
wait?: number,
): ReturnValue<T> {
// eslint-disable-next-line no-underscore-dangle
const hooksDeps: DependencyList = (Array.isArray(deps) ? deps : []) as DependencyList;
// eslint-disable-next-line no-underscore-dangle
const hookWait: number = typeof deps === 'number' ? deps : wait || 0;
const timer = useRef<any>();
const fnRef = useRef<any>(fn);
fnRef.current = fn;
const cancel = useCallback(() => {
if (timer.current) {
clearTimeout(timer.current);
}
}, []);
const run = useCallback(
async (...args: any): Promise<void> => {
return new Promise((resolve) => {
cancel();
timer.current = setTimeout(async () => {
await fnRef.current(...args);
resolve();
}, hookWait);
});
},
[hookWait, cancel],
);
useUpdateEffect(() => {
run();
return cancel;
}, [...hooksDeps, run]);
useEffect(() => cancel, []);
return {
run,
cancel,
};
}
export default useDebounceFn;
<file_sep>/packages/utils/src/array-move/index.ts
export function arrayMoveMutable<ValueType>(
array: ValueType[],
fromIndex: number,
toIndex: number,
) {
const startIndex = fromIndex < 0 ? array.length + fromIndex : fromIndex;
if (startIndex >= 0 && startIndex < array.length) {
const endIndex = toIndex < 0 ? array.length + toIndex : toIndex;
const [item] = array.splice(fromIndex, 1);
array.splice(endIndex, 0, item);
}
}
export function arrayMoveImmutable<T>(array: T[], fromIndex: number, toIndex: number) {
const newArray = [...array];
arrayMoveMutable(newArray, fromIndex, toIndex);
return newArray;
}
<file_sep>/scripts/utils/getPackages.js
const { readdirSync } = require('fs');
const { join } = require('path');
module.exports = function getPackages() {
return readdirSync(join(__dirname, '../../packages')).filter((pkg) => pkg.charAt(0) !== '.');
};
<file_sep>/packages/utils/src/useMountMergeState/index.ts
import useMergedState from 'rc-util/lib/hooks/useMergedState';
import { useEffect, useRef } from 'react';
type Dispatch<A> = (value: A) => void;
function useMountMergeState<S>(
initialState: S | (() => S),
option?: {
defaultValue?: S;
value?: S;
onChange?: (value: S, prevValue: S) => void;
postState?: (value: S) => S;
},
): [S, Dispatch<S>] {
const mountRef = useRef<boolean>(false);
const frame = useRef<number>(0);
useEffect(() => {
mountRef.current = true;
return () => {
mountRef.current = false;
};
});
const [state, setState] = useMergedState<S>(initialState, option);
const mountSetState: Dispatch<S> = (prevState: S) => {
cancelAnimationFrame(frame.current);
frame.current = requestAnimationFrame(() => {
if (mountRef.current) {
setState(prevState);
}
});
};
return [state, mountSetState];
}
export default useMountMergeState;
<file_sep>/scripts/createRelease.js
const GitHub = require('github');
const exec = require('child_process').execSync;
const fs = require('fs');
const path = require('path');
const github = new GitHub({
debug: process.env.NODE_ENV === 'development',
});
github.authenticate({
type: 'token',
token: process.env.GITHUB_TOKEN || process.env.GITHUB_AUTH,
});
const getChangelog = (content, version) => {
const lines = content.split('\n');
const changeLog = [];
const startPattern = new RegExp(`^## ${version}`);
const stopPattern = /^## /; // 前一个版本
const skipPattern = /^`/; // 日期
let begin = false;
for (let i = 0; i < lines.length; i += 1) {
const line = lines[i];
if (begin && stopPattern.test(line)) {
break;
}
if (begin && line && !skipPattern.test(line)) {
changeLog.push(line);
}
if (!begin) {
begin = startPattern.test(line);
}
}
return changeLog.join('\n');
};
const getMds = (allVersion = false) => {
const docDir = path.join(__dirname, '..', 'docs');
const mdFils = fs.readdirSync(docDir).filter((name) => name.includes('changelog.md'));
mdFils.map((mdFile) => {
const pkg = mdFile.replace('pro-', '').replace('.changelog.md', '');
const content = fs.readFileSync(path.join(docDir, mdFile)).toString();
let versions = [
require(path.join(path.join(__dirname, '..', 'packages', pkg, 'package.json'))).version,
];
if (allVersion) {
versions = exec('git tag')
.toString()
.split('\n')
.filter((tag) => tag.includes(pkg))
.map((tag) => tag.split('@').pop());
}
console.log(versions);
versions.map((version) => {
const versionPkg = `@ant-design/pro-${pkg}@${version}`;
const changeLog = getChangelog(content, versionPkg);
if (!changeLog) {
return;
}
github.repos
.createRelease({
owner: 'ant-design',
repo: 'pro-components',
tag_name: versionPkg,
name: versionPkg,
body: changeLog,
})
.catch((e) => {
console.log(e);
});
});
});
};
getMds();
<file_sep>/packages/utils/src/transformKeySubmitValue/index.ts
import React from 'react';
import type { SearchTransformKeyFn } from '../typing';
import get from 'rc-util/lib/utils/get';
import namePathSet from 'rc-util/lib/utils/set';
import merge from 'lodash.merge';
import isNil from '../isNil';
export type DataFormatMapType = Record<string, SearchTransformKeyFn | undefined>;
const transformKeySubmitValue = <T = any>(
values: T,
dataFormatMapRaw: Record<string, SearchTransformKeyFn | undefined | DataFormatMapType>,
omit: boolean = true,
) => {
// ignore nil transform
const dataFormatMap = Object.keys(dataFormatMapRaw).reduce((ret, key) => {
const value = dataFormatMapRaw[key];
if (!isNil(value)) {
// eslint-disable-next-line no-param-reassign
ret[key] = value! as SearchTransformKeyFn; // can't be undefined
}
return ret;
}, {} as Record<string, SearchTransformKeyFn>);
if (Object.keys(dataFormatMap).length < 1) {
return values;
}
// 如果 value 是 string | null | Blob类型 其中之一,直接返回
// 形如 {key: [File, File]} 的表单字段当进行第二次递归时会导致其直接越过 typeof value !== 'object' 这一判断 https://github.com/ant-design/pro-components/issues/2071
if (typeof values !== 'object' || isNil(values) || values instanceof Blob) {
return values;
}
let finalValues = {} as T;
const gen = (tempValues: T, parentsKey?: React.Key[]) => {
let result = {} as T;
if (tempValues == null || tempValues === undefined) {
return result;
}
Object.keys(tempValues).forEach((entityKey) => {
const key = parentsKey ? [parentsKey, entityKey].flat(1) : [entityKey].flat(1);
const itemValue = tempValues[entityKey];
const transformFunction = get(dataFormatMap, key);
const transform = () => {
const tempKey =
typeof transformFunction === 'function'
? transformFunction?.(itemValue, entityKey, tempValues)
: entityKey;
// { [key:string]:any } 数组也能通过编译
if (Array.isArray(tempKey)) {
result = namePathSet(result, tempKey, itemValue);
return;
}
if (typeof tempKey === 'object') {
finalValues = {
...finalValues,
...tempKey,
};
} else if (tempKey) {
result = namePathSet(result, [tempKey], itemValue);
}
};
/** 如果存在转化器提前渲染一下 */
if (transformFunction && typeof transformFunction === 'function') {
transform();
}
if (
typeof itemValue === 'object' &&
!Array.isArray(itemValue) &&
!React.isValidElement(itemValue) && // ignore walk throungh React Element
!(itemValue instanceof Blob) // ignore walk throungh Blob
) {
const genValues = gen(itemValue, key);
if (Object.keys(genValues).length < 1) {
return;
}
result = namePathSet(result, [entityKey], genValues);
return;
}
transform();
});
// namePath、transform在omit为false时需正常返回 https://github.com/ant-design/pro-components/issues/2901#issue-908097115
return omit ? result : tempValues;
};
finalValues = merge({}, gen(values), finalValues);
return finalValues;
};
export default transformKeySubmitValue;
<file_sep>/tests/doc.test.ts
import demoTest from './demo';
demoTest('docs');
<file_sep>/tests/setupTests.js
import MockDate from 'mockdate';
import Enzyme from 'enzyme';
import 'jest-canvas-mock';
import moment from 'moment-timezone';
import { enableFetchMocks } from 'jest-fetch-mock';
import tableData from './table/mock.data.json';
jest.mock('react', () => ({
...jest.requireActual('react'),
useLayoutEffect: jest.requireActual('react').useEffect,
}));
const eventListener = {};
/* eslint-disable global-require */
if (typeof window !== 'undefined') {
global.window.resizeTo = (width, height) => {
global.window.innerWidth = width || global.window.innerWidth;
global.window.innerHeight = height || global.window.innerHeight;
global.window.dispatchEvent(new Event('resize'));
};
document.addEventListener = (name, cb) => {
eventListener[name] = cb;
};
document.dispatchEvent = (event) => eventListener[event.type]?.(event);
global.window.scrollTo = () => {};
// ref: https://github.com/ant-design/ant-design/issues/18774
if (!window.matchMedia) {
Object.defineProperty(global.window, 'matchMedia', {
writable: true,
configurable: true,
value: jest.fn(() => ({
matches: false,
addListener: jest.fn(),
removeListener: jest.fn(),
})),
});
}
if (!window.matchMedia) {
Object.defineProperty(global.window, 'matchMedia', {
writable: true,
configurable: true,
value: jest.fn((query) => ({
matches: query.includes('max-width'),
addListener: jest.fn(),
removeListener: jest.fn(),
})),
});
}
}
Object.assign(Enzyme.ReactWrapper.prototype, {
findObserver() {
return this.find('ResizeObserver');
},
triggerResize() {
const ob = this.findObserver();
ob.instance().onResize([{ target: ob.getDOMNode() }]);
},
});
enableFetchMocks();
global.requestAnimationFrame =
global.requestAnimationFrame ||
function requestAnimationFrame(cb) {
return setTimeout(cb, 0);
};
global.cancelAnimationFrame =
global.cancelAnimationFrame ||
function cancelAnimationFrame() {
return null;
};
// browserMocks.js
const localStorageMock = (() => {
let store = {
umi_locale: 'zh-CN',
};
return {
getItem(key) {
return store[key] || null;
},
setItem(key, value) {
store[key] = value.toString();
},
removeItem(key) {
store[key] = null;
},
clear() {
store = {};
},
};
})();
Object.defineProperty(window, 'localStorage', {
value: localStorageMock,
});
Object.defineProperty(window, 'cancelAnimationFrame', {
value: () => null,
});
moment.tz.setDefault('UTC');
// 2016-11-22 15:22:44
MockDate.set(1479799364000);
const mockFormatExpression = {
format: (value) => `¥ ${value.toString()}`,
};
Intl.NumberFormat = jest.fn().mockImplementation(() => mockFormatExpression);
Math.random = () => 0.8404419276253765;
fetch.mockResponse(async () => {
return { body: JSON.stringify(tableData) };
});
Object.assign(Enzyme.ReactWrapper.prototype, {
findObserver() {
return this.find('ResizeObserver');
},
triggerResize() {
const ob = this.findObserver();
ob.instance().onResize([{ target: ob.getDOMNode() }]);
},
});
// @ts-ignore-next-line
global.Worker = class {
constructor(stringUrl) {
// @ts-ignore-next-line
this.url = stringUrl;
// @ts-ignore-next-line
this.onmessage = () => {};
}
postMessage(msg) {
// @ts-ignore-next-line
this.onmessage(msg);
}
};
// @ts-ignore-next-line
global.URL.createObjectURL = () => {};
<file_sep>/packages/utils/src/conversionMomentValue/index.ts
import type { InternalNamePath, NamePath } from 'antd/lib/form/interface';
import moment from 'moment';
import get from 'rc-util/lib/utils/get';
import isNil from '../isNil';
import type { ProFieldValueType } from '../typing';
type DateFormatter = 'number' | 'string' | false;
export const dateFormatterMap = {
time: 'HH:mm:ss',
timeRange: 'HH:mm:ss',
date: 'YYYY-MM-DD',
dateWeek: 'YYYY-wo',
dateMonth: 'YYYY-MM',
dateQuarter: 'YYYY-QQ',
dateYear: 'YYYY',
dateRange: 'YYYY-MM-DD',
dateTime: 'YYYY-MM-DD HH:mm:ss',
dateTimeRange: 'YYYY-MM-DD HH:mm:ss',
};
function isObject(o: any) {
return Object.prototype.toString.call(o) === '[object Object]';
}
export function isPlainObject(o: { constructor: any }) {
if (isObject(o) === false) return false;
// If has modified constructor
const ctor = o.constructor;
if (ctor === undefined) return true;
// If has modified prototype
const prot = ctor.prototype;
if (isObject(prot) === false) return false;
// If constructor does not have an Object-specific method
if (prot.hasOwnProperty('isPrototypeOf') === false) {
return false;
}
// Most likely a plain Object
return true;
}
/**
* 根据不同的格式转化 moment
*
* @param value
* @param dateFormatter
* @param valueType
*/
const convertMoment = (value: moment.Moment, dateFormatter: string | false, valueType: string) => {
if (!dateFormatter) {
return value;
}
if (moment.isMoment(value)) {
if (dateFormatter === 'number') {
return value.valueOf();
}
if (dateFormatter === 'string') {
return value.format(dateFormatterMap[valueType] || 'YYYY-MM-DD HH:mm:ss');
}
if (typeof dateFormatter === 'string' && dateFormatter !== 'string') {
return value.format(dateFormatter);
}
}
return value;
};
/**
* 这里主要是来转化一下数据 将 moment 转化为 string 将 all 默认删除
*
* @param value
* @param dateFormatter
* @param proColumnsMap
*/
const conversionMomentValue = <T = any>(
value: T,
dateFormatter: DateFormatter,
valueTypeMap: Record<
string,
| {
valueType: ProFieldValueType;
dateFormat: string;
}
| any
>,
omitNil?: boolean,
parentKey?: NamePath,
): T => {
const tmpValue = {} as T;
// 如果 value 是 string | null | Blob类型 其中之一,直接返回
// 形如 {key: [File, File]} 的表单字段当进行第二次递归时会导致其直接越过 typeof value !== 'object' 这一判断 https://github.com/ant-design/pro-components/issues/2071
if (typeof value !== 'object' || isNil(value) || value instanceof Blob || Array.isArray(value)) {
return value;
}
Object.keys(value).forEach((key) => {
const namePath: InternalNamePath = parentKey ? ([parentKey, key].flat(1) as string[]) : [key];
const valueFormatMap = get(valueTypeMap, namePath) || 'text';
let valueType: ProFieldValueType = 'text';
let dateFormat: string | undefined;
if (typeof valueFormatMap === 'string') {
valueType = valueFormatMap as ProFieldValueType;
} else if (valueFormatMap) {
valueType = valueFormatMap.valueType;
dateFormat = valueFormatMap.dateFormat;
}
const itemValue = value[key];
if (isNil(itemValue) && omitNil) {
return;
}
// 处理嵌套的情况
if (
isPlainObject(itemValue) &&
// 不是数组
!Array.isArray(itemValue) &&
// 不是 moment
!moment.isMoment(itemValue)
) {
tmpValue[key] = conversionMomentValue(itemValue, dateFormatter, valueTypeMap, omitNil, [key]);
return;
}
// 处理 FormList 的 value
if (Array.isArray(itemValue)) {
tmpValue[key] = itemValue.map((arrayValue, index) => {
if (moment.isMoment(arrayValue)) {
return convertMoment(arrayValue, dateFormat || dateFormatter, valueType);
}
return conversionMomentValue(arrayValue, dateFormatter, valueTypeMap, omitNil, [
key,
`${index}`,
]);
});
return;
}
tmpValue[key] = convertMoment(itemValue, dateFormat || dateFormatter, valueType);
});
return tmpValue;
};
export default conversionMomentValue;
| 642ba92e2ddc5a4ff01ae0872d5fea775b264158 | [
"JavaScript",
"TypeScript",
"Markdown"
] | 24 | JavaScript | MrXujiang/xu-pro-lerna | c84abd8ce0bb38ed103b39b348dcb0f159d65ac7 | 8acf18dae6603ad92d6e4417f565a4ce2a4f00ca |
refs/heads/master | <repo_name>Lab43/Chart-Demo<file_sep>/index.js
var raw = require('./test-json.json').nodes;
var _ = require('underscore');
var jf = require('jsonfile');
var startDate = Date.parse('2014-01-01')
, endDate = Date.parse('2014-12-31')
, days = {}
, topicIDs = []
;
raw.forEach(function (item) {
var node = item.node;
var output = {};
output.date = formatDate(new Date(node.date));
output.desc = node.title;
if (node.topic) { output.topic = node.topic };
if (node.topic_id) { output.topic_id = node.topic_id };
if (node.macro) {
output.type = 'macro';
} else if (node.meso) {
output.type = 'meso';
} else if (node.micro) {
output.type = 'micro';
} else {
output.type = 'other';
}
if (! days[output.date]) {
days[output.date] = [];
}
days[output.date].push(output);
});
var daysOfYear = [];
for (var d = new Date(2014, 0, 1); d <= new Date(2014, 11, 31); d.setDate(d.getDate() + 1)) {
var output = {};
output.date = formatDate(new Date(d));
output.nodes = days[output.date];
output.labelDay = d.getDate();
if (d.getDate() === 1) {
switch(d.getMonth()) {
case 0:
output.labelMonth = 'January';
break;
case 1:
output.labelMonth = 'February';
break;
case 2:
output.labelMonth = 'March';
break;
case 3:
output.labelMonth = 'April';
break;
case 4:
output.labelMonth = 'May';
break;
case 5:
output.labelMonth = 'June';
break;
case 6:
output.labelMonth = 'July';
break;
case 7:
output.labelMonth = 'August';
break;
case 8:
output.labelMonth = 'September';
break;
case 9:
output.labelMonth = 'October';
break;
case 10:
output.labelMonth = 'November';
break;
case 11:
output.labelMonth = 'December';
break;
}
}
daysOfYear.push(output);
}
jf.writeFile('output.json', daysOfYear, function(err) {
console.log(err);
})
function formatDate(day) {
return day.getFullYear() + '-' + (day.getMonth() + 1) + '-' + day.getDate()
}
| 95830a5a8caabd07cfbb67c994683bdca4b2f10d | [
"JavaScript"
] | 1 | JavaScript | Lab43/Chart-Demo | bde5fd4818c5ef1afcbd481fdb27d580b457b398 | 73939100fa1bdb0f43f6a42b99996589863a50f4 |
refs/heads/master | <file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/detail/invoke.hpp>
#include <agency/detail/type_traits.hpp>
#include <agency/execution/executor/detail/utility/invoke_functors.hpp>
#include <agency/execution/executor/customization_points/bulk_then_execute.hpp>
#include <agency/future.hpp>
#include <type_traits>
namespace agency
{
namespace detail
{
// Bulk-launches f as a continuation of predecessor, collecting each agent's
// individual (non-void) result into the collective object produced by
// result_factory; returns the executor's future to that filled object.
//
// The heavy lifting is done by wrapping f in invoke_and_collect_result
// (from invoke_functors.hpp), which presumably stores each invocation's
// return value into the result object at the agent's index -- confirm
// against that header. This function itself only builds the wrapper and
// forwards to the generic bulk_then_execute() customization point.
//
// Enabled only when:
//   * E satisfies the BulkExecutor concept,
//   * exactly one shared factory is supplied per level of E's execution
//     hierarchy (executor_execution_depth<E>),
//   * f's invocation result is non-void (the void case needs no collection
//     and is handled by a different overload elsewhere).
template<class E, class Function, class Future, class ResultFactory, class... SharedFactories,
         __AGENCY_REQUIRES(BulkExecutor<E>()),
         __AGENCY_REQUIRES(executor_execution_depth<E>::value == sizeof...(SharedFactories)),
         __AGENCY_REQUIRES(!std::is_void<result_of_continuation_t<Function, executor_index_t<E>, Future, result_of_t<SharedFactories()>&...>>::value)
        >
__AGENCY_ANNOTATION
executor_future_t<E,result_of_t<ResultFactory()>>
bulk_then_execute_with_collected_result(E& exec, Function f, executor_shape_t<E> shape, Future& predecessor, ResultFactory result_factory, SharedFactories... shared_factories)
{
  // the wrapper must know the predecessor future's value type so it can
  // receive the predecessor's value by reference
  using predecessor_type = future_value_t<Future>;

  // wrap f in a functor that will collect f's result and call bulk_then_execute()
  return agency::bulk_then_execute(exec, invoke_and_collect_result<Function,predecessor_type>{f}, shape, predecessor, result_factory, shared_factories...);
}
} // end detail
} // end agency
<file_sep>#pragma once
#include <type_traits>
#include <agency/detail/invoke.hpp>
#include <agency/future.hpp>
#include <future>
// A deliberately empty type that models no executor concept at all;
// presumably used as a negative case when testing executor trait/concept
// detection (it should be rejected by every executor category check).
struct not_an_executor {};
// A minimal executor that chains work onto an existing std::future: each
// submitted task runs on its own thread once the predecessor becomes ready.
class continuation_executor
{
  public:
    // Schedules f to run after predecessor completes. The predecessor future
    // is consumed (moved from); f receives a reference to its value, and the
    // returned future holds f's result.
    template<class Function, class T>
    std::future<agency::detail::result_of_t<Function(T&)>>
    then_execute(Function&& f, std::future<T>& predecessor)
    {
      auto continuation = [](std::future<T>&& fut, Function&& func)
      {
        // block until the predecessor is ready, then hand its value to func
        T value = fut.get();
        return std::forward<Function>(func)(value);
      };

      return std::async(std::launch::async,
                        continuation,
                        std::move(predecessor),
                        std::forward<Function>(f));
    }

    // Overload for void predecessors: wait for completion (propagating any
    // stored exception), then invoke f with no arguments.
    template<class Function>
    std::future<agency::detail::result_of_t<Function()>>
    then_execute(Function&& f, std::future<void>& predecessor)
    {
      auto continuation = [](std::future<void>&& fut, Function&& func)
      {
        // wait for the predecessor; get() rethrows any exception it holds
        fut.get();
        return std::forward<Function>(func)();
      };

      return std::async(std::launch::async,
                        continuation,
                        std::move(predecessor),
                        std::forward<Function>(f));
    }
};
// A minimal executor that eagerly launches each task on its own thread.
class asynchronous_executor
{
  public:
    // Launches f() via std::async and returns a future to its eventual
    // result. The explicit std::launch::async policy guarantees f runs
    // asynchronously on a new thread rather than lazily on .get().
    template<class Function>
    std::future<agency::detail::result_of_t<Function()>>
    async_execute(Function&& f)
    {
      return std::async(std::launch::async, std::forward<Function>(f));
    }
};
// A minimal executor that runs work inline on the calling thread.
class synchronous_executor
{
  public:
    // Invokes f() immediately and returns its result. Blocking by
    // construction: does not return until f has completed.
    template<class Function>
    agency::detail::result_of_t<Function()>
    sync_execute(Function&& f)
    {
      return std::forward<Function>(f)();
    }
};
// Test executor which chains a *group* of agents onto a predecessor future.
// The agents are executed on a binary tree of std::async threads: each node
// launches its left and right half-ranges on new threads and executes the
// middle index itself.
class bulk_continuation_executor
{
  public:
    // Creates a group of n agents which run after `predecessor` completes.
    // Each agent invokes f(idx, predecessor_value, result, shared) (the
    // predecessor_value argument is omitted when the predecessor is void).
    // result_factory() constructs the object eventually returned through the
    // future; shared_factory() constructs the parameter shared by all agents.
    template<class Function, class Future, class ResultFactory, class SharedFactory>
    std::future<
      typename std::result_of<ResultFactory()>::type
    >
    bulk_then_execute(Function f, size_t n, Future& predecessor, ResultFactory result_factory, SharedFactory shared_factory)
    {
      return bulk_then_execute_impl(f, n, predecessor, result_factory, shared_factory);
    }

  private:
    // overload for non-void predecessor futures: f receives the predecessor's value
    template<class Function, class Future, class ResultFactory, class SharedFactory>
    std::future<agency::detail::result_of_t<ResultFactory()>>
    bulk_then_execute_impl(Function f, size_t n, Future& predecessor, ResultFactory result_factory, SharedFactory shared_factory,
                           typename std::enable_if<
                             !std::is_void<
                               typename agency::future_traits<Future>::value_type
                             >::value
                           >::type* = 0)
    {
      if(n > 0)
      {
        using predecessor_type = typename agency::future_traits<Future>::value_type;

        return agency::detail::monadic_then(predecessor, std::launch::async, [=](predecessor_type& predecessor) mutable
        {
          // put all the shared parameters on the first thread's stack
          auto result = result_factory();
          auto shared_parameter = shared_factory();

          // create a lambda to handle parameter passing
          auto g = [&,f](size_t idx)
          {
            agency::detail::invoke(f, idx, predecessor, result, shared_parameter);
          };

          // bisect [0,n): half-ranges run on new threads, the middle index runs here
          size_t mid = n / 2;

          std::future<void> left = agency::detail::make_ready_future();
          if(0 < mid)
          {
            left = this->async(g, 0, mid);
          }

          std::future<void> right = agency::detail::make_ready_future();
          if(mid + 1 < n)
          {
            right = this->async(g, mid + 1, n);
          }

          g(mid);

          // join both halves before returning the result
          left.wait();
          right.wait();

          return std::move(result);
        });
      }

      // empty group: the result is ready immediately
      return agency::detail::make_ready_future(result_factory());
    }

    // overload for void predecessor futures: f receives no predecessor argument
    template<class Function, class Future, class ResultFactory, class SharedFactory>
    std::future<agency::detail::result_of_t<ResultFactory()>>
    bulk_then_execute_impl(Function f, size_t n, Future& predecessor, ResultFactory result_factory, SharedFactory shared_factory,
                           typename std::enable_if<
                             std::is_void<
                               typename agency::future_traits<Future>::value_type
                             >::value
                           >::type* = 0)
    {
      if(n > 0)
      {
        return agency::detail::monadic_then(predecessor, std::launch::async, [=]() mutable
        {
          // put all the shared parameters on the first thread's stack
          auto result = result_factory();
          auto shared_parameter = shared_factory();

          // create a lambda to handle parameter passing
          auto g = [&,f](size_t idx)
          {
            agency::detail::invoke(f, idx, result, shared_parameter);
          };

          // bisect [0,n): half-ranges run on new threads, the middle index runs here
          size_t mid = n / 2;

          std::future<void> left = agency::detail::make_ready_future();
          if(0 < mid)
          {
            left = this->async(g, 0, mid);
          }

          std::future<void> right = agency::detail::make_ready_future();
          if(mid + 1 < n)
          {
            right = this->async(g, mid + 1, n);
          }

          g(mid);

          // join both halves before returning the result
          left.wait();
          right.wait();

          return std::move(result);
        });
      }

      // empty group: the result is ready immediately
      return agency::detail::make_ready_future(result_factory());
    }

    // recursively executes f(i) for each i in [first, last) on a binary tree
    // of std::async threads
    // first must be less than last
    template<class Function>
    std::future<void> async(Function f, size_t first, size_t last)
    {
      return std::async(std::launch::async, [=]() mutable
      {
        size_t mid = (last + first) / 2;

        std::future<void> left = agency::detail::make_ready_future();
        if(first < mid)
        {
          left = this->async(f, first, mid);
        }

        std::future<void> right = agency::detail::make_ready_future();
        if(mid + 1 < last)
        {
          right = this->async(f, mid + 1, last);
        }

        agency::detail::invoke(f,mid);

        left.wait();
        right.wait();
      });
    }
};
// Test executor which eagerly executes a bulk task inline on the calling thread.
class bulk_synchronous_executor
{
  public:
    // Invokes f(i, result, shared) for each i in [0, n), in order, on the
    // calling thread.
    //
    // result_factory() constructs the object returned to the caller;
    // shared_factory() constructs the parameter shared by all invocations.
    template<class Function, class ResultFactory, class SharedFactory>
    typename std::result_of<ResultFactory()>::type
      bulk_sync_execute(Function f, size_t n, ResultFactory result_factory, SharedFactory shared_factory)
    {
      auto result = result_factory();
      auto shared_parm = shared_factory();

      for(size_t i = 0; i < n; ++i)
      {
        f(i, result, shared_parm);
      }

      // return by name rather than std::move(result): moving a local in a
      // return statement is a pessimization that disables copy elision (NRVO)
      return result;
    }
};
// Test executor which eagerly executes a bulk task on a single newly
// launched asynchronous thread.
class bulk_asynchronous_executor
{
  public:
    // Launches a thread which invokes f(i, result, shared) for each i in
    // [0, n), in order, and returns a future to the object created by
    // result_factory(). shared_factory() creates the parameter shared by
    // all invocations.
    template<class Function, class ResultFactory, class SharedFactory>
    std::future<
      typename std::result_of<ResultFactory()>::type
    >
      bulk_async_execute(Function f, size_t n, ResultFactory result_factory, SharedFactory shared_factory)
    {
      return std::async(std::launch::async, [=]
      {
        auto result = result_factory();
        auto shared_parm = shared_factory();

        for(size_t i = 0; i < n; ++i)
        {
          f(i, result, shared_parm);
        }

        // return by name rather than std::move(result): moving a local in a
        // return statement is a pessimization that disables copy elision
        return result;
      });
    }
};
// these executor types fall into two categories: each one inherits the two
// bulk customization points its name does NOT mention
struct not_a_bulk_synchronous_executor : bulk_asynchronous_executor, bulk_continuation_executor {};
struct not_a_bulk_asynchronous_executor : bulk_synchronous_executor, bulk_continuation_executor {};
struct not_a_bulk_continuation_executor : bulk_synchronous_executor, bulk_asynchronous_executor {};

// this executor type falls into three categories: it provides all three
// bulk customization points
struct complete_bulk_executor : bulk_synchronous_executor, bulk_asynchronous_executor, bulk_continuation_executor {};
// executor with no member ::shape_type; bulk_sync_execute is declared but
// never defined, so this type is only usable in unevaluated (trait) contexts
struct bulk_executor_without_shape_type
{
  template<class Function, class ResultFactory, class SharedFactory>
  typename std::result_of<ResultFactory()>::type
  bulk_sync_execute(Function f, size_t n, ResultFactory result_factory, SharedFactory shared_factory);
};

// executor with a member ::shape_type used as the shape parameter of
// bulk_sync_execute; also declaration-only, for trait tests
struct bulk_executor_with_shape_type
{
  struct shape_type
  {
    size_t n;
  };

  template<class Function, class ResultFactory, class SharedFactory>
  typename std::result_of<ResultFactory()>::type
  bulk_sync_execute(Function f, shape_type n, ResultFactory result_factory, SharedFactory shared_factory);
};
<file_sep>#include <iostream>
#include <type_traits>
#include <vector>
#include <cassert>
#include <agency/future.hpp>
#include <agency/execution/executor/customization_points.hpp>
#include "../test_executors.hpp"
// Exercises the agency::bulk_sync_execute() customization point with the
// given executor: 10 agents each write 7 + shared_arg[idx] into a shared
// result vector, which must equal a vector of ten 20s.
template<class Executor>
void test(Executor exec)
{
  using shape_type = agency::executor_shape_t<Executor>;
  using index_type = agency::executor_index_t<Executor>;

  shape_type shape = 10;

  auto result = agency::bulk_sync_execute(exec,
    [](index_type idx, std::vector<int>& results, std::vector<int>& shared_arg)
    {
      results[idx] = 7 + shared_arg[idx];
    },
    shape,
    [=]{ return std::vector<int>(shape); },    // results
    [=]{ return std::vector<int>(shape, 13); } // shared_arg
  );

  assert(std::vector<int>(10, 7 + 13) == result);
}
int main()
{
  // bulk_sync_execute() must work with executors from every bulk category,
  // adapting those which do not natively provide bulk_sync_execute
  test(bulk_synchronous_executor());
  test(bulk_asynchronous_executor());
  test(bulk_continuation_executor());

  test(not_a_bulk_synchronous_executor());
  test(not_a_bulk_asynchronous_executor());
  test(not_a_bulk_continuation_executor());

  test(complete_bulk_executor());

  std::cout << "OK" << std::endl;

  return 0;
}
<file_sep>#include <iostream>
#include <type_traits>
#include <vector>
#include <agency/execution/executor/parallel_executor.hpp>
#include <agency/execution/executor/executor_traits.hpp>
#include <agency/execution/executor/customization_points.hpp>
int main()
{
  using namespace agency;

  // compile-time checks of parallel_executor's traits
  static_assert(is_bulk_continuation_executor<parallel_executor>::value,
    "parallel_executor should be a bulk continuation executor");

  static_assert(is_bulk_executor<parallel_executor>::value,
    "parallel_executor should be a bulk executor");

  static_assert(detail::is_detected_exact<parallel_execution_tag, executor_execution_category_t, parallel_executor>::value,
    "parallel_executor should have parallel_execution_tag execution_category");

  static_assert(detail::is_detected_exact<size_t, executor_shape_t, parallel_executor>::value,
    "parallel_executor should have size_t shape_type");

  static_assert(detail::is_detected_exact<size_t, executor_index_t, parallel_executor>::value,
    "parallel_executor should have size_t index_type");

  static_assert(detail::is_detected_exact<std::future<int>, executor_future_t, parallel_executor, int>::value,
    "parallel_executor should have std::future future");

  static_assert(executor_execution_depth<parallel_executor>::value == 1,
    "parallel_executor should have execution_depth == 1");

  // runtime check: chain a bulk task onto a ready future holding 7;
  // each of the 10 agents computes 7 + 13 into the shared result vector
  parallel_executor exec;

  std::future<int> fut = agency::make_ready_future<int>(exec, 7);

  size_t shape = 10;

  auto f = exec.bulk_then_execute(
    [](size_t idx, int& past_arg, std::vector<int>& results, std::vector<int>& shared_arg)
    {
      results[idx] = past_arg + shared_arg[idx];
    },
    shape,
    fut,
    [=]{ return std::vector<int>(shape); },    // results
    [=]{ return std::vector<int>(shape, 13); } // shared_arg
  );

  auto result = f.get();

  assert(std::vector<int>(10, 7 + 13) == result);

  std::cout << "OK" << std::endl;

  return 0;
}
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/type_traits.hpp>
#include <agency/execution/executor/executor_traits/is_bulk_executor.hpp>
#include <agency/execution/executor/executor_traits/detail/member_allocator_or.hpp>
#include <memory>
namespace agency
{
namespace detail
{
// Implementation of executor_allocator. The primary template is empty
// (defines no ::type) when BulkExecutor is not a bulk executor, which keeps
// executor_allocator_t SFINAE-friendly.
template<class BulkExecutor, class T, bool Enable = is_bulk_executor<BulkExecutor>::value>
struct executor_allocator_impl
{
};

// When BulkExecutor really is a bulk executor, use its member ::allocator
// template if it has one; otherwise fall back to std::allocator.
template<class BulkExecutor, class T>
struct executor_allocator_impl<BulkExecutor,T,true>
{
  using type = member_allocator_or_t<BulkExecutor,T,std::allocator>;
};
} // end detail
// Trait yielding the allocator type a bulk executor uses to allocate
// objects of type T; no nested ::type when BulkExecutor is not a bulk executor.
template<class BulkExecutor, class T>
struct executor_allocator : detail::executor_allocator_impl<BulkExecutor,T> {};

// Convenience alias for executor_allocator<...>::type.
template<class BulkExecutor, class T>
using executor_allocator_t = typename executor_allocator<BulkExecutor,T>::type;
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/memory/allocator/detail/allocator_traits.hpp>
#include <agency/memory/allocator/detail/allocator_traits/is_allocator.hpp>
#include <agency/bulk_invoke.hpp>
#include <agency/execution/execution_policy/detail/simple_sequenced_policy.hpp>
#include <agency/detail/type_traits.hpp>
#include <agency/detail/iterator/iterator_traits.hpp>
namespace agency
{
namespace detail
{
// Function object invoked by one execution agent per element: destroys the
// element at the agent's rank via the allocator.
struct destroy_functor
{
  __agency_exec_check_disable__
  template<class Agent, class Allocator, class RandomAccessIterator>
  __AGENCY_ANNOTATION
  void operator()(Agent& self, Allocator alloc, RandomAccessIterator first)
  {
    // each agent destroys the single element at its own rank
    auto i = self.rank();
    allocator_traits<Allocator>::destroy(alloc, &first[i]);
  }
};
// Destroys each element in [first, last) through the allocator and returns
// an iterator to the end of the destroyed range.
//
// this overload is for cases where we need not execute sequentially:
// 1. ExecutionPolicy is not sequenced AND
// 2. Iterator is random access
template<class ExecutionPolicy, class Allocator, class RandomAccessIterator,
         __AGENCY_REQUIRES(
           detail::is_allocator<Allocator>::value
         ),
         __AGENCY_REQUIRES(
           !policy_is_sequenced<decay_t<ExecutionPolicy>>::value and
           iterator_is_random_access<RandomAccessIterator>::value
         )>
__AGENCY_ANNOTATION
RandomAccessIterator destroy(ExecutionPolicy&& policy, const Allocator& alloc, RandomAccessIterator first, RandomAccessIterator last)
{
  auto n = last - first;

  // launch one agent per element; each destroys the element at its rank
  agency::bulk_invoke(policy(n), destroy_functor(), alloc, first);

  return first + n;
}
// Destroys each element in [first, last) through the allocator, one at a
// time on the calling agent, and returns an iterator to the end of the range.
//
// this overload is for cases where we must execute sequentially
// 1. ExecutionPolicy is sequenced OR
// 2. Iterators are not random access
template<class ExecutionPolicy, class Allocator, class Iterator,
         __AGENCY_REQUIRES(
           detail::is_allocator<Allocator>::value
         ),
         __AGENCY_REQUIRES(
           policy_is_sequenced<decay_t<ExecutionPolicy>>::value or
           !iterator_is_random_access<Iterator>::value
         )>
__AGENCY_ANNOTATION
Iterator destroy(ExecutionPolicy&&, Allocator& alloc, Iterator first, Iterator last)
{
  // XXX perhaps we should bulk_invoke a single agent and execute this loop in that agent
  for(; first != last; ++first)
  {
    agency::detail::allocator_traits<Allocator>::destroy(alloc, &*first);
  }

  return first;
}
// Convenience overload without an execution policy: destroys [first, last)
// sequentially.
template<class Allocator, class Iterator,
         __AGENCY_REQUIRES(
           detail::is_allocator<Allocator>::value
         )>
__AGENCY_ANNOTATION
Iterator destroy(Allocator& alloc, Iterator first, Iterator last)
{
  // use simple_sequenced_policy here to avoid circular dependencies
  // created by the use of sequenced_policy
  simple_sequenced_policy seq;
  return detail::destroy(seq, alloc, first, last);
}
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/factory.hpp>
#include <agency/detail/uninitialized.hpp>
#include <agency/detail/tuple/tuple_utility.hpp>
#include <type_traits>
namespace agency
{
namespace cuda
{
namespace detail
{
// Trait: true when the object a Factory produces is stateless -- either an
// empty class or an empty tuple -- so no __shared__ storage is needed for it.
template<class Factory>
struct result_of_factory_is_empty
  : std::integral_constant<
      bool,
      (std::is_empty<agency::detail::result_of_t<Factory()>>::value ||
      agency::detail::is_empty_tuple<agency::detail::result_of_t<Factory()>>::value)
    >
{};
// RAII-style holder for a parameter shared by all threads of a CUDA thread
// block. The parameter lives in __shared__ memory; only the leader thread
// constructs and destroys it, with __syncthreads() barriers ordering
// construction before any use and all use before destruction.
// Every thread of the block must construct and destroy this object together
// (the constructor and destructor each execute a block-wide barrier).
template<class Factory, bool = result_of_factory_is_empty<Factory>::value>
struct on_chip_shared_parameter
{
  using value_type = agency::detail::result_of_t<Factory()>;

  inline __device__
  on_chip_shared_parameter(bool is_leader, Factory factory)
    : is_leader_(is_leader)
  {
    // storage is declared here so that it is placed in the block's shared memory
    __shared__ agency::detail::uninitialized<value_type> inner_shared_param;

    // only the leader constructs the value; the barrier makes it visible
    // to the other threads before they can call get()
    if(is_leader_)
    {
      inner_shared_param.construct(factory());
    }
    __syncthreads();

    inner_shared_param_ = &inner_shared_param;
  }

  // non-copyable, non-movable: every thread holds its own object referring
  // to the same __shared__ storage
  on_chip_shared_parameter(const on_chip_shared_parameter&) = delete;
  on_chip_shared_parameter(on_chip_shared_parameter&&) = delete;

  inline __device__
  ~on_chip_shared_parameter()
  {
    // wait until all threads are finished with the value before the leader
    // destroys it
    __syncthreads();

    if(is_leader_)
    {
      inner_shared_param_->destroy();
    }
  }

  // returns a reference to the block-wide shared value
  inline __device__
  value_type& get()
  {
    return inner_shared_param_->get();
  }

  const bool is_leader_;
  agency::detail::uninitialized<value_type>* inner_shared_param_;
};
// Specialization for factories producing empty (stateless) types: no
// __shared__ storage or synchronization is needed, so each thread simply
// keeps its own (empty) copy of the value.
template<class Factory>
struct on_chip_shared_parameter<Factory,true>
{
  using value_type = agency::detail::result_of_t<Factory()>;

  // the factory's result is empty, so it need not actually be invoked
  inline __device__
  on_chip_shared_parameter(bool is_leader_, Factory) {}

  on_chip_shared_parameter(const on_chip_shared_parameter&) = delete;
  on_chip_shared_parameter(on_chip_shared_parameter&&) = delete;

  inline __device__
  value_type& get()
  {
    return param_;
  }

  value_type param_;
};
} // end detail
} // end cuda
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/type_traits.hpp>
#include <agency/execution/executor/executor_traits/is_bulk_synchronous_executor.hpp>
#include <agency/execution/executor/executor_traits/is_bulk_asynchronous_executor.hpp>
#include <agency/execution/executor/executor_traits/is_bulk_continuation_executor.hpp>
namespace agency
{
// Trait: T is a bulk executor if it provides at least one of the three bulk
// customization points (bulk_sync_execute, bulk_async_execute, or
// bulk_then_execute).
template<class T>
using is_bulk_executor = agency::detail::disjunction<
  is_bulk_synchronous_executor<T>,
  is_bulk_asynchronous_executor<T>,
  is_bulk_continuation_executor<T>
>;
namespace detail
{
// a fake Concept to use with __AGENCY_REQUIRES
// returns is_bulk_executor<T>'s value via integral_constant's constexpr
// conversion to bool
template<class T>
constexpr bool BulkExecutor()
{
  return is_bulk_executor<T>();
}
} // end detail
} // end agency
<file_sep>// Copyright (c) 2017, NVIDIA CORPORATION. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// * Neither the name of NVIDIA CORPORATION nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/detail/integer_sequence.hpp>
#include <agency/detail/type_list.hpp>
#include <agency/detail/tuple/tuple_leaf.hpp>
#include <stddef.h> // XXX instead of <cstddef> to WAR clang issue
#include <type_traits>
#include <utility> // <utility> declares std::tuple_element et al. for us
namespace agency
{
namespace detail
{
// declare tuple_base so that the specializations in std:: below can refer to it
template<class IndexSequence, class... Args>
class tuple_base;
} // end detail
} // end agency
// specializations of stuff in std come before their use below in the definition of tuple_base
// specializations of stuff in std come before their use below in the definition of tuple_base
namespace std
{

// tuple_element recurses on the element index: index 0 selects the head
// type, index i selects element i-1 of the tail.
template<size_t i, class IndexSequence>
class tuple_element<i, agency::detail::tuple_base<IndexSequence>> {};

template<class IndexSequence, class Type1, class... Types>
class tuple_element<0, agency::detail::tuple_base<IndexSequence,Type1,Types...>>
{
  public:
    using type = Type1;
};

template<size_t i, class IndexSequence, class Type1, class... Types>
class tuple_element<i, agency::detail::tuple_base<IndexSequence,Type1,Types...>>
{
  public:
    using type = typename tuple_element<i - 1, agency::detail::tuple_base<IndexSequence,Types...>>::type;
};

// tuple_size is simply the number of element types
template<class IndexSequence, class... Types>
class tuple_size<agency::detail::tuple_base<IndexSequence,Types...>>
  : public std::integral_constant<size_t, sizeof...(Types)>
{};

} // end std
namespace agency
{
namespace detail
{
// XXX this implementation is based on the "tuple leaf" construction used in libcxx's tuple
// Storage and member-function implementation for Agency's tuple. Each
// element is held in a tuple_leaf<I,Type> base class; the index pack I...
// lets constructors and assignment expand operations over all leaves at once.
template<size_t... I, class... Types>
class tuple_base<index_sequence<I...>, Types...>
  : public tuple_leaf<I,Types>...
{
  public:
    __AGENCY_ANNOTATION
    tuple_base() = default;

    // construct each leaf by copying the corresponding argument
    __AGENCY_ANNOTATION
    tuple_base(const Types&... args)
      : tuple_leaf<I,Types>(args)...
    {}

    // converting constructor: forward each argument into its leaf;
    // enabled only when every Types[i] is constructible from UTypes[i]&&
    template<class... UTypes,
             __AGENCY_REQUIRES(
               (sizeof...(Types) == sizeof...(UTypes)) &&
               conjunction<
                 std::is_constructible<Types,UTypes&&>...
               >::value
             )>
    __AGENCY_ANNOTATION
    explicit tuple_base(UTypes&&... args)
      : tuple_leaf<I,Types>(std::forward<UTypes>(args))...
    {}

    // copy construction: copy each of other's leaves
    __AGENCY_ANNOTATION
    tuple_base(const tuple_base& other)
      : tuple_leaf<I,Types>(other.template const_leaf<I>())...
    {}

    // move construction: move each of other's leaves
    __AGENCY_ANNOTATION
    tuple_base(tuple_base&& other)
      : tuple_leaf<I,Types>(std::move(other.template mutable_leaf<I>()))...
    {}

    // converting copy construction from a tuple_base with different element types
    template<class... UTypes,
             __AGENCY_REQUIRES(
               (sizeof...(Types) == sizeof...(UTypes)) &&
               conjunction<
                 std::is_constructible<Types,const UTypes&>...
               >::value
             )>
    __AGENCY_ANNOTATION
    tuple_base(const tuple_base<index_sequence<I...>,UTypes...>& other)
      : tuple_leaf<I,Types>(other.template const_leaf<I>())...
    {}

    // converting move construction from a tuple_base with different element types
    template<class... UTypes,
             __AGENCY_REQUIRES(
               (sizeof...(Types) == sizeof...(UTypes)) &&
               conjunction<
                 std::is_constructible<Types,UTypes&&>...
               >::value
             )>
    __AGENCY_ANNOTATION
    tuple_base(tuple_base<index_sequence<I...>,UTypes...>&& other)
      : tuple_leaf<I,Types>(std::move(other.template mutable_leaf<I>()))...
    {}

    // converting construction from std::tuple, delegating to the
    // element-wise converting constructor
    template<class... UTypes,
             __AGENCY_REQUIRES(
               (sizeof...(Types) == sizeof...(UTypes)) &&
               conjunction<
                 std::is_constructible<Types,const UTypes&>...
               >::value
             )>
    __AGENCY_ANNOTATION
    tuple_base(const std::tuple<UTypes...>& other)
      : tuple_base{std::get<I>(other)...}
    {}

    // copy assignment: assign each leaf; swallow() forces the pack expansion
    __AGENCY_ANNOTATION
    tuple_base& operator=(const tuple_base& other)
    {
      swallow((mutable_leaf<I>() = other.template const_leaf<I>())...);
      return *this;
    }

    // move assignment
    __AGENCY_ANNOTATION
    tuple_base& operator=(tuple_base&& other)
    {
      swallow((mutable_leaf<I>() = std::move(other.template mutable_leaf<I>()))...);
      return *this;
    }

    // converting copy assignment from a tuple_base with different element types
    template<class... UTypes,
             __AGENCY_REQUIRES(
               (sizeof...(Types) == sizeof...(UTypes)) &&
               conjunction<
                 std::is_assignable<Types,const UTypes&>...
               >::value
             )>
    __AGENCY_ANNOTATION
    tuple_base& operator=(const tuple_base<index_sequence<I...>,UTypes...>& other)
    {
      swallow((mutable_leaf<I>() = other.template const_leaf<I>())...);
      return *this;
    }

    // converting move assignment from a tuple_base with different element types
    template<class... UTypes,
             __AGENCY_REQUIRES(
               (sizeof...(Types) == sizeof...(UTypes)) &&
               conjunction<
                 std::is_assignable<Types,UTypes&&>...
               >::value
             )>
    __AGENCY_ANNOTATION
    tuple_base& operator=(tuple_base<index_sequence<I...>,UTypes...>&& other)
    {
      swallow((mutable_leaf<I>() = std::move(other.template mutable_leaf<I>()))...);
      return *this;
    }

    // copy assignment from std::pair, for 2-element tuples only;
    // the "sizeof...(Types) == 2 ? 1 : 0" guard avoids instantiating
    // tuple_element<1,...> when the tuple has fewer than two elements
    template<class UType1, class UType2,
             __AGENCY_REQUIRES(
               (sizeof...(Types) == 2) &&
               conjunction<
                 std::is_assignable<typename std::tuple_element<                      0,tuple_base>::type,const UType1&>,
                 std::is_assignable<typename std::tuple_element<sizeof...(Types) == 2 ? 1 : 0,tuple_base>::type,const UType2&>
               >::value
             )>
    __AGENCY_ANNOTATION
    tuple_base& operator=(const std::pair<UType1,UType2>& p)
    {
      mutable_get<0>() = p.first;
      mutable_get<1>() = p.second;
      return *this;
    }

    // move assignment from std::pair, for 2-element tuples only
    template<class UType1, class UType2,
             __AGENCY_REQUIRES(
               (sizeof...(Types) == 2) &&
               conjunction<
                 std::is_assignable<typename std::tuple_element<                      0,tuple_base>::type,UType1&&>,
                 std::is_assignable<typename std::tuple_element<sizeof...(Types) == 2 ? 1 : 0,tuple_base>::type,UType2&&>
               >::value
             )>
    __AGENCY_ANNOTATION
    tuple_base& operator=(std::pair<UType1,UType2>&& p)
    {
      mutable_get<0>() = std::move(p.first);
      mutable_get<1>() = std::move(p.second);
      return *this;
    }

    // leaf accessors: each works by converting *this to the unique
    // tuple_leaf<i,...> base class subobject
    template<size_t i>
    __AGENCY_ANNOTATION
    const tuple_leaf<i,typename std::tuple_element<i,tuple_base>::type>& const_leaf() const
    {
      return *this;
    }

    template<size_t i>
    __AGENCY_ANNOTATION
    tuple_leaf<i,typename std::tuple_element<i,tuple_base>::type>& mutable_leaf()
    {
      return *this;
    }

    template<size_t i>
    __AGENCY_ANNOTATION
    tuple_leaf<i,typename std::tuple_element<i,tuple_base>::type>&& move_leaf() &&
    {
      return std::move(*this);
    }

    // element-wise swap with another tuple_base of the same type
    __AGENCY_ANNOTATION
    void swap(tuple_base& other)
    {
      swallow(tuple_leaf<I,Types>::swap(other)...);
    }

    // element accessors, delegating to the corresponding leaf
    template<size_t i>
    __AGENCY_ANNOTATION
    const typename std::tuple_element<i,tuple_base>::type& const_get() const
    {
      return const_leaf<i>().const_get();
    }

    template<size_t i>
    __AGENCY_ANNOTATION
    typename std::tuple_element<i,tuple_base>::type& mutable_get()
    {
      return mutable_leaf<i>().mutable_get();
    }

    // enable conversion to Tuple-like things
    // XXX relax std::tuple to Tuple and require is_tuple
    template<class... UTypes,
             __AGENCY_REQUIRES(
               (sizeof...(Types) == sizeof...(UTypes)) &&
               conjunction<
                 std::is_constructible<Types,const UTypes&>...
               >::value
             )>
    __AGENCY_ANNOTATION
    operator std::tuple<UTypes...> () const
    {
      return std::tuple<UTypes...>(const_get<I>()...);
    }

  private:
    // no-op variadic sink used to force evaluation of a pack expansion
    template<class... Args>
    __AGENCY_ANNOTATION
    static void swallow(Args&&...) {}
};
} // end detail
} // end agency
<file_sep>/// \file
/// \brief Contains definitions of execution categories.
///
/// \defgroup execution_categories Execution Categories
/// \ingroup execution
/// \brief Execution categories categorize forward progress behavior.
///
/// Execution categories describe the forward progress behavior of groups of execution agents
/// without regard to the thread on which an individual agent executes.
///
/// Each execution agent within a group of such agents is associated with a function invocation.
/// This function invocation depends on the context in which execution agents
/// are created; for example, the group of function invocations created by bulk_invoke().
///
/// Execution categories describe the ordering of these function invocations with respect to one another.
///
/// 1. sequenced: Invocations are sequenced.
/// 2. concurrent: Unblocked invocations make progress.
/// 3. parallel: When invocations occur on the same thread, they are sequenced. When invocations occur on
/// different threads, they are unsequenced.
/// 4. unsequenced: Invocations are unsequenced.
///
/// Execution categories are represented in the C++ type system using tag types. Different components of
/// Agency use these types to reason about forward progress guarantees and validate that forward progress
/// guarantees satisfy forward progress requirements.
#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/type_traits.hpp>
#include <type_traits>
#include <stddef.h>
namespace agency
{
/// \brief Type representing the sequenced execution category.
/// \ingroup execution_categories
struct sequenced_execution_tag {};

/// \brief Type representing the concurrent execution category.
/// \ingroup execution_categories
struct concurrent_execution_tag {};

/// \brief Type representing the parallel execution category.
/// \ingroup execution_categories
struct parallel_execution_tag {};

/// \brief Type representing the unsequenced execution category.
/// \ingroup execution_categories
struct unsequenced_execution_tag {};

/// \brief Type representing the scoped execution category.
/// \ingroup execution_categories
///
/// A scoped category composes an outer category (ordering between groups)
/// with an inner category (ordering within a group); inner categories may
/// themselves be scoped, yielding arbitrarily deep nestings.
template<class ExecutionCategory1, class ExecutionCategory2>
struct scoped_execution_tag
{
  using outer_execution_category = ExecutionCategory1;
  using inner_execution_category = ExecutionCategory2;
};

/// \brief Type representing the dynamic execution category.
/// \ingroup execution_categories
///
/// The dynamic category makes no static forward progress guarantee.
struct dynamic_execution_tag {};
namespace detail
{
// XXX maybe "weakness" is the wrong way to describe what we're really interested in here
// we just want some idea of substitutability
// "<" means "is weaker than"
// "<" is transitive
// if category A is weaker than category B,
// then agents in category A can be executed with agents in category B
//
// these relationships should be true
//
// parallel_execution_tag < sequenced_execution_tag
// parallel_execution_tag < concurrent_execution_tag
// unsequenced_execution_tag < parallel_execution_tag
// dynamic_execution_tag < unsequenced_execution_tag
//
// XXX figure out how scoped_execution_tag sorts
// in general, categories are not weaker than another
// is_weaker_than<A,B>: true when category A's guarantee is weaker than (or
// equal to) B's, so agents requiring only A can be executed under B.
// By default, categories are unrelated.
template<class ExecutionCategory1, class ExecutionCategory2>
struct is_weaker_than : std::false_type {};

// all categories are weaker than themselves
template<class ExecutionCategory>
struct is_weaker_than<ExecutionCategory,ExecutionCategory> : std::true_type {};

// dynamic is weaker than everything else
template<class ExecutionCategory2>
struct is_weaker_than<dynamic_execution_tag, ExecutionCategory2> : std::true_type {};

// introduce this specialization to disambiguate two other specializations
template<>
struct is_weaker_than<dynamic_execution_tag, dynamic_execution_tag> : std::true_type {};

// unsequenced is weaker than everything except dynamic
template<class ExecutionCategory2>
struct is_weaker_than<unsequenced_execution_tag, ExecutionCategory2> : std::true_type {};

// introduce this specialization to disambiguate two other specializations
template<>
struct is_weaker_than<unsequenced_execution_tag, unsequenced_execution_tag> : std::true_type {};

template<>
struct is_weaker_than<unsequenced_execution_tag, dynamic_execution_tag> : std::false_type {};

// parallel is weaker than sequenced & concurrent
template<>
struct is_weaker_than<parallel_execution_tag, sequenced_execution_tag> : std::true_type {};

template<>
struct is_weaker_than<parallel_execution_tag, concurrent_execution_tag> : std::true_type {};

// detects scoped_execution_tag (i.e., a nested, multi-level category)
template<class ExecutionCategory>
struct is_scoped_execution_category : std::false_type {};

template<class ExecutionCategory1, class ExecutionCategory2>
struct is_scoped_execution_category<scoped_execution_tag<ExecutionCategory1,ExecutionCategory2>> : std::true_type {};

// execution_depth: 1 for a "flat" category; for a scoped category, one more
// than the depth of its inner category
template<class ExecutionCategory>
struct execution_depth : std::integral_constant<size_t, 1> {};

template<class ExecutionCategory1, class ExecutionCategory2>
struct execution_depth<scoped_execution_tag<ExecutionCategory1,ExecutionCategory2>>
  : std::integral_constant<
      size_t,
      1 + execution_depth<ExecutionCategory2>::value
    >
{};
// this namespace contains the implementation of common_execution_category
namespace common_execution_category_detail
{

// because the implementation of common_execution_category2 is recursive, we introduce
// a forward declaration so it may call itself.
template<class ExecutionCategory1, class ExecutionCategory2>
struct common_execution_category2;

template<class ExecutionCategory1, class ExecutionCategory2>
using common_execution_category2_t = typename common_execution_category2<ExecutionCategory1,ExecutionCategory2>::type;

// the implementation of common_execution_category2_impl is recursive, and there are two base cases

// base case 1: the input categories have different depths
//              i.e., one may be "flat" and the other scoped,
//              or both may be scoped but have different depths
template<class ExecutionCategory1, class ExecutionCategory2, size_t depth1, size_t depth2>
struct common_execution_category2_impl
{
  // there's no commonality between the two input categories
  // so the result is "dynamic" -- there are no static guarantees that can be provided
  using type = dynamic_execution_tag;
};

// base case 2: both input categories are "flat"
template<class ExecutionCategory1, class ExecutionCategory2>
struct common_execution_category2_impl<ExecutionCategory1,ExecutionCategory2,1,1>
{
  // both ExecutionCategory1 & ExecutionCategory2 have depth 1 -- they are "flat"

  // if one of the two categories is weaker than the other, then return it
  // otherwise, return the weakest static guarantee: unsequenced
  using type = conditional_t<
    is_weaker_than<ExecutionCategory1,ExecutionCategory2>::value,
    ExecutionCategory1,
    conditional_t<
      is_weaker_than<ExecutionCategory2,ExecutionCategory1>::value,
      ExecutionCategory2,
      unsequenced_execution_tag
    >
  >;
};

// recursive case: both input categories are scoped
template<class OuterCategory1, class InnerCategory1, class OuterCategory2, class InnerCategory2, size_t depth>
struct common_execution_category2_impl<
  scoped_execution_tag<OuterCategory1,InnerCategory1>,
  scoped_execution_tag<OuterCategory2,InnerCategory2>,
  depth,depth
>
{
  // both categories are scoped and they have the same depth
  // XXX it may not matter so much that the depth is the same.
  //     we may still be able to apply this recipe sensibly even
  //     when the two input categories' depths differ

  // the result is scoped. apply common_execution_category to
  // the inputs' constituents to get the result's constituents
  using type = scoped_execution_tag<
    common_execution_category2_t<OuterCategory1,OuterCategory2>,
    common_execution_category2_t<InnerCategory1,InnerCategory2>
  >;
};

// entry point: dispatch to the _impl above on the two categories' depths
template<class ExecutionCategory1, class ExecutionCategory2>
struct common_execution_category2
{
  using type = typename common_execution_category2_impl<
    ExecutionCategory1,
    ExecutionCategory2,
    execution_depth<ExecutionCategory1>::value,
    execution_depth<ExecutionCategory2>::value
  >::type;
};

} // end common_execution_category_detail
// common_execution_category is a type trait which, given one or more possibly different execution categories,
// returns a category representing the strongest guarantees that can be made given the different input possibilities
template<class ExecutionCategory, class... ExecutionCategories>
struct common_execution_category;

template<class ExecutionCategory, class... ExecutionCategories>
using common_execution_category_t = typename common_execution_category<ExecutionCategory,ExecutionCategories...>::type;

// the implementation of common_execution_category is recursive

// this is the recursive case: fold the pack right-to-left through the
// two-category implementation
template<class ExecutionCategory1, class ExecutionCategory2, class... ExecutionCategories>
struct common_execution_category<ExecutionCategory1, ExecutionCategory2, ExecutionCategories...>
{
  using type = common_execution_category_t<
    ExecutionCategory1,
    common_execution_category_t<ExecutionCategory2, ExecutionCategories...>
  >;
};

// base case 1: a single category is its own common category
template<class ExecutionCategory>
struct common_execution_category<ExecutionCategory>
{
  using type = ExecutionCategory;
};

// base case 2: two categories
template<class ExecutionCategory1, class ExecutionCategory2>
struct common_execution_category<ExecutionCategory1,ExecutionCategory2>
{
  // with two categories, we lower onto the two category
  // implementation inside common_execution_category_detail
  using type = common_execution_category_detail::common_execution_category2_t<ExecutionCategory1,ExecutionCategory2>;
};
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <iterator>
namespace agency
{
namespace detail
{
// Iterator adaptor which traverses the underlying iterator's range in
// reverse: *it yields *(current - 1), as with std::reverse_iterator.
template<class Iterator>
class reverse_iterator
{
  public:
    using value_type = typename std::iterator_traits<Iterator>::value_type;
    using reference = typename std::iterator_traits<Iterator>::reference;
    using pointer = typename std::iterator_traits<Iterator>::pointer;
    using difference_type = typename std::iterator_traits<Iterator>::difference_type;
    using iterator_category = typename std::iterator_traits<Iterator>::iterator_category;
    using iterator_type = Iterator;

    __AGENCY_ANNOTATION
    reverse_iterator() = default;

    __AGENCY_ANNOTATION
    explicit reverse_iterator(Iterator x)
      : current_(x)
    {}

    // converting copy constructor from a reverse_iterator over a
    // convertible underlying iterator type
    template<class U>
    __AGENCY_ANNOTATION
    reverse_iterator(const reverse_iterator<U>& other)
      : current_(other.base())
    {}

    template<class U>
    __AGENCY_ANNOTATION
    reverse_iterator& operator=(const reverse_iterator<U>& other)
    {
      current_ = other.base();
      return *this;
    }

    // returns the underlying (un-reversed) iterator
    __AGENCY_ANNOTATION
    Iterator base() const
    {
      return current_;
    }

    // dereference: yields the element just before the underlying iterator
    __AGENCY_ANNOTATION
    reference operator*() const
    {
      Iterator tmp = current_;
      return *--tmp;
    }

    // XXX fixed: operator-> must return pointer, not reference --
    //     returning &operator*() (a pointer) as a reference would fail to
    //     compile on instantiation
    __AGENCY_ANNOTATION
    pointer operator->() const
    {
      return &operator*();
    }

    // subscript
    __AGENCY_ANNOTATION
    reference operator[](difference_type n) const
    {
      reverse_iterator tmp = *this;
      tmp += n;
      return *tmp;
    }

    // pre-increment
    __AGENCY_ANNOTATION
    reverse_iterator& operator++()
    {
      --current_;
      return *this;
    }

    // pre-decrement
    // XXX added __AGENCY_ANNOTATION for consistency with the other members
    __AGENCY_ANNOTATION
    reverse_iterator& operator--()
    {
      ++current_;
      return *this;
    }

    // post-increment
    __AGENCY_ANNOTATION
    reverse_iterator operator++(int)
    {
      reverse_iterator result = *this;
      --current_;
      return result;
    }

    // post-decrement
    __AGENCY_ANNOTATION
    reverse_iterator operator--(int)
    {
      reverse_iterator result = *this;
      ++current_;
      return result;
    }

    // plus
    __AGENCY_ANNOTATION
    reverse_iterator operator+(difference_type n) const
    {
      reverse_iterator result = *this;
      result += n;
      return result;
    }

    // minus
    __AGENCY_ANNOTATION
    reverse_iterator operator-(difference_type n) const
    {
      reverse_iterator result = *this;
      result -= n;
      return result;
    }

    // plus-equal: advancing the reverse iterator moves the base backward
    __AGENCY_ANNOTATION
    reverse_iterator& operator+=(difference_type n)
    {
      current_ -= n;
      return *this;
    }

    // minus-equal
    __AGENCY_ANNOTATION
    reverse_iterator& operator-=(difference_type n)
    {
      current_ += n;
      return *this;
    }

  private:
    iterator_type current_;
};
// equality delegates directly to the base iterators
template<class Iterator1, class Iterator2>
__AGENCY_ANNOTATION
bool operator==(const reverse_iterator<Iterator1>& lhs,
                const reverse_iterator<Iterator2>& rhs)
{
  return lhs.base() == rhs.base();
}

template<class Iterator1, class Iterator2>
__AGENCY_ANNOTATION
bool operator!=(const reverse_iterator<Iterator1>& lhs,
                const reverse_iterator<Iterator2>& rhs)
{
  return lhs.base() != rhs.base();
}

// Ordering comparisons reflect position within the *reversed* sequence:
// an iterator earlier in the reversed sequence has a *greater* base iterator.
// XXX fixed: these previously compared the base iterators in the same
//     direction (lhs.base() < rhs.base()), inverting the ordering relative
//     to std::reverse_iterator's semantics.

template<class Iterator1, class Iterator2>
__AGENCY_ANNOTATION
bool operator<(const reverse_iterator<Iterator1>& lhs,
               const reverse_iterator<Iterator2>& rhs)
{
  return lhs.base() > rhs.base();
}

template<class Iterator1, class Iterator2>
__AGENCY_ANNOTATION
bool operator<=(const reverse_iterator<Iterator1>& lhs,
                const reverse_iterator<Iterator2>& rhs)
{
  return lhs.base() >= rhs.base();
}

template<class Iterator1, class Iterator2>
__AGENCY_ANNOTATION
bool operator>(const reverse_iterator<Iterator1>& lhs,
               const reverse_iterator<Iterator2>& rhs)
{
  return lhs.base() < rhs.base();
}

template<class Iterator1, class Iterator2>
__AGENCY_ANNOTATION
bool operator>=(const reverse_iterator<Iterator1>& lhs,
                const reverse_iterator<Iterator2>& rhs)
{
  return lhs.base() <= rhs.base();
}

// convenience factory which deduces the iterator type
template<class Iterator>
__AGENCY_ANNOTATION
reverse_iterator<Iterator> make_reverse_iterator(Iterator i)
{
  return reverse_iterator<Iterator>(i);
}
} // end detail
} // end agency
<file_sep># this python/scons script implements Agency's build logic
# it may make the most sense to read this file beginning
# at the bottom and proceeding towards the top
import os
def create_a_program_for_each_source_in_the_current_directory(env):
    """Collects all source files in the current directory and creates a program from each of them.

    Returns the list of all such programs created.
    """
    # gather every C++ and CUDA source in the current directory,
    # .cpp files first, then .cu files
    sources = []
    for extension in ['.cpp', '.cu']:
        pattern = os.path.join('.', '*' + extension)
        sources.extend(env.Glob(pattern))
    # env.Program() always returns a list of targets, but an executable
    # program has exactly one target, so keep only the first element
    return [env.Program(source)[0] for source in sources]
def create_an_alias_to_execute_programs_as_unit_tests(env, programs, run_programs_command):
    """Creates an alias with a name given by run_programs_command which runs each program in programs after it is built"""
    relative_path_from_root = env.Dir('.').path
    # XXX WAR an issue where env.Dir('.').path does not return a relative path for the root directory
    # NOTE(review): realpath is given the literal string "__file__", not the
    # __file__ variable (SCons does not define __file__ inside SConstruct files);
    # dirname(realpath("__file__")) therefore evaluates to the current working
    # directory, which identifies the root only when scons is invoked from it
    # -- TODO confirm intent
    root_abspath = os.path.dirname(os.path.realpath("__file__"))
    if relative_path_from_root == root_abspath:
        relative_path_from_root = '.'
    # elide '.' so the root directory's alias is run_programs_command itself
    if relative_path_from_root == '.':
        relative_path_from_root = ''
    # e.g. "subdir/run_examples" for a subdirectory, "run_examples" at the root
    alias_name = os.path.join(relative_path_from_root, run_programs_command)
    # executing each built program becomes the alias's build action
    program_absolute_paths = [p.abspath for p in programs]
    alias = env.Alias(alias_name, programs, program_absolute_paths)
    # the alias produces no target file, so force it to "build" (i.e., run) every time
    env.AlwaysBuild(alias)
    return [alias]
# this is the function each SConscript in the directory tree calls
# we will add it as a method to the SCons environment that subsidiary SConscripts import
def RecursivelyCreateProgramsAndUnitTestAliases(env):
    """Builds a program from each source in this directory and, recursively, its children.

    Also creates a 'run_examples' alias which executes every program built.
    Returns the list of all programs created.
    """
    # create a program for each source found in the current directory
    programs = create_a_program_for_each_source_in_the_current_directory(env)
    # recurse into all SConscripts in immediate child directories and add their programs to our collection
    # when there are multiple child directories, this returns a list of lists of programs
    # when there are 1 or 0 child directories, this returns a flat list of programs
    programs_of_each_child = env.SConscript(env.Glob('*/SConscript'), exports='env')
    # flatten either shape into our collection
    # a bare except here would mask real errors (e.g. a broken SConscript);
    # only the TypeError raised by extending from a non-iterable program node
    # signals the flat-list case
    try:
        for child_programs in programs_of_each_child:
            programs.extend(child_programs)
    except TypeError:
        programs.extend(programs_of_each_child)
    # create unit tests for these programs and run them when "run_examples" is given as a scons command line option
    create_an_alias_to_execute_programs_as_unit_tests(env, programs, 'run_examples')
    return programs
# this function takes a SCons environment and specifies some compiler flags to use
def apply_compiler_flags(env):
    """Applies warning, optimization, and code-generation flags to the build environment.

    Raises:
        ValueError: if env['CXX'] names a compiler with no known flag set.
    """
    # a dictionary mapping compiler features to the list of compiler switches implementing them
    gnu_compiler_flags = {
        'warnings' : {
            'all' : '-Wall',
            'extra' : '-Wextra'
        },
        'warnings_as_errors' : '-Werror'
    }

    clang_compiler_flags = {
        'warnings' : {
            # XXX with clang, nvcc generates -Wunused-local-typedefs warnings due to nvbug 1890561
            # eliminate this workaround once 1890561 is resolved
            # XXX with clang, nvcc generates -Wunused-private-field warnings due to nvbug 1890717
            # eliminate this workaround once 1890717 is resolved
            'all' : '-Wall -Wno-unused-local-typedef -Wno-unused-private-field',
            # -Wmismatched-tags produces warnings we cannot eliminate, so don't enable it
            # XXX with clang, nvcc generates -Wunused-parameter warnings due to nvbug 1889862
            # eliminate this workaround once 1889862 is resolved
            'extra' : '-Wextra -Wno-mismatched-tags -Wno-unused-parameter'
        },
        'warnings_as_errors' : '-Werror'
    }

    all_compiler_flags = {}
    all_compiler_flags['g++'] = gnu_compiler_flags
    all_compiler_flags['clang'] = clang_compiler_flags

    # chop off any version suffix from C++ compiler name (e.g. "clang-3.8" -> "clang")
    compiler_name = env['CXX'].split('-')[0]

    # fail with a clear message instead of a bare KeyError for unsupported compilers
    if compiler_name not in all_compiler_flags:
        raise ValueError('apply_compiler_flags: unsupported C++ compiler {!r}; supported compilers: {}'.format(
            env['CXX'], ', '.join(sorted(all_compiler_flags))))

    this_compilers_flags = all_compiler_flags[compiler_name]

    # get all the c++ compiler flags for the warnings enabled
    cxx_warning_flags = [this_compilers_flags['warnings'][key] for key in env['warnings']]

    if env['warnings_as_errors']:
        cxx_warning_flags.append(this_compilers_flags['warnings_as_errors'])

    # first, general C++ flags
    env.MergeFlags(['-O3', '-std=c++11', '-lstdc++', '-lpthread'] + cxx_warning_flags)

    # next, flags for nvcc
    env.MergeFlags(['--expt-extended-lambda', '-arch=' + str(env['arch'])])
# script execution begins here
# set up some variables we can control from the command line
# (Variables, Dir, ListVariable, BoolVariable, and Environment are names
# SCons injects into this script's namespace)
vars = Variables()
vars.Add('CXX', 'C++ compiler', 'clang')
vars.Add('CPPPATH', 'Agency include path', Dir('..'))
# CUDA compute capabilities to generate device code for
vars.Add(ListVariable('arch', 'Compute capability code generation', 'sm_52',
                      ['sm_30', 'sm_32', 'sm_35', 'sm_37',
                       'sm_50', 'sm_52',
                       'sm_60']))
vars.Add(ListVariable('warnings', 'Compiler warning options', 'all',
                      ['all', 'extra']))
vars.Add(BoolVariable('warnings_as_errors', 'Treat warnings as errors', True))
# create a SCons build environment; the nvcc tool teaches SCons to compile .cu files
env = Environment(variables = vars, tools = ['default', 'nvcc-scons/nvcc'])
apply_compiler_flags(env)
# add our custom shorthand methods for subsidiary SConscripts' use
env.AddMethod(RecursivelyCreateProgramsAndUnitTestAliases)
# call this directory's SConscript to kick off the recursive build
env.SConscript('./SConscript', exports = 'env')
<file_sep>#pragma once
#include <agency/future.hpp>
#include <agency/execution/execution_categories.hpp>
#include <functional>
#include <utility>
namespace agency
{
// An executor which executes work immediately on the calling thread,
// one agent at a time, in order of agent index.
class sequenced_executor
{
  public:
    // agents execute in sequence, never concurrently
    using execution_category = sequenced_execution_tag;

    // Invokes f(i, result, shared) for each i in [0, n), in order, on the
    // calling thread.
    //
    //   result_factory() constructs the object returned to the caller
    //   shared_factory() constructs the parameter shared by all agents
    template<class Function, class ResultFactory, class SharedFactory>
    agency::detail::result_of_t<ResultFactory()>
      bulk_sync_execute(Function f, size_t n, ResultFactory result_factory, SharedFactory shared_factory)
    {
      auto result = result_factory();
      // named shared_arg for consistency with the other executors
      auto shared_arg = shared_factory();

      for(size_t i = 0; i < n; ++i)
      {
        f(i, result, shared_arg);
      }

      // return by name: enables copy elision while still implicitly moving
      // move-only result types (std::move(result) here would inhibit NRVO)
      return result;
    }
};
} // end agency
<file_sep>#include <agency/execution/executor/executor_traits.hpp>
#include <type_traits>
#include <iostream>
// mock executor types used by main() below to exercise how
// agency::executor_index_t is detected

// not an executor at all: no bulk_sync_execute member
struct not_an_executor {};

// an executor with neither a nested shape_type nor index_type;
// main() asserts its index_type defaults to size_t
struct bulk_executor_without_index_type
{
  template<class Function, class ResultFactory, class SharedFactory>
  typename std::result_of<ResultFactory()>::type
    bulk_sync_execute(Function f, size_t n, ResultFactory result_factory, SharedFactory shared_factory);
};

// an executor with a nested shape_type but no index_type;
// main() asserts its index_type falls back to shape_type
struct bulk_executor_with_shape_type_without_index_type
{
  struct shape_type
  {
    size_t n;
  };

  template<class Function, class ResultFactory, class SharedFactory>
  typename std::result_of<ResultFactory()>::type
    bulk_sync_execute(Function f, shape_type n, ResultFactory result_factory, SharedFactory shared_factory);
};

// an executor with a nested index_type but no shape_type
struct bulk_executor_with_index_type
{
  struct index_type
  {
    size_t i;
  };

  template<class Function, class ResultFactory, class SharedFactory>
  typename std::result_of<ResultFactory()>::type
    bulk_sync_execute(Function f, size_t n, ResultFactory result_factory, SharedFactory shared_factory);
};

// an executor with both nested shape_type and index_type;
// main() asserts the nested index_type takes precedence
struct bulk_executor_with_shape_type_with_index_type
{
  struct shape_type
  {
    size_t n;
  };

  struct index_type
  {
    size_t i;
  };

  template<class Function, class ResultFactory, class SharedFactory>
  typename std::result_of<ResultFactory()>::type
    bulk_sync_execute(Function f, shape_type n, ResultFactory result_factory, SharedFactory shared_factory);
};
int main()
{
  // compile-time checks of agency::executor_index_t's detection rules
  // against the mock executor types defined above; a successful build
  // (plus printing "OK") is the test's pass condition

  static_assert(!agency::detail::is_detected<agency::executor_index_t, not_an_executor>::value,
    "executor_index_t<not_an_executor> should not be detected");

  static_assert(agency::detail::is_detected_exact<size_t, agency::executor_index_t, bulk_executor_without_index_type>::value,
    "bulk_executor_without_index_type should have size_t index_type");

  static_assert(agency::detail::is_detected_exact<bulk_executor_with_shape_type_without_index_type::shape_type, agency::executor_index_t, bulk_executor_with_shape_type_without_index_type>::value,
    "bulk_executor_with_shape_type_without_index_type should have bulk_executor_with_shape_type_without_index_type::shape_type index_type");

  static_assert(agency::detail::is_detected_exact<bulk_executor_with_index_type::index_type, agency::executor_index_t, bulk_executor_with_index_type>::value,
    "bulk_executor_with_index_type should have bulk_executor_with_index_type::index_type index_type");

  static_assert(agency::detail::is_detected_exact<bulk_executor_with_shape_type_with_index_type::index_type, agency::executor_index_t, bulk_executor_with_shape_type_with_index_type>::value,
    "bulk_executor_with_shape_type_with_index_type should have bulk_executor_with_shape_type_with_index_type::index_type index_type");

  std::cout << "OK" << std::endl;

  return 0;
}
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/experimental/ranges/range_traits.hpp>
#include <agency/experimental/ranges/all.hpp>
#include <type_traits>
#include <utility>
namespace agency
{
namespace experimental
{
// flatten_view does not assume the size of the segments are the same
// operator[] and size() would have more efficient implementations if
// we made that assumption
// we should consider another kind of fancy range that would "un-tile" a collection of tiles

// A view presenting a range of ranges ("segments") as a single flat
// random access range over all of the segments' elements.
template<class RangeOfRanges>
class flatten_view
{
  private:
    using inner_range_type = range_reference_t<RangeOfRanges>;
    using all_t = agency::experimental::all_t<RangeOfRanges&>;

    // befriend other instantiations for the converting copy constructor below
    template<class> friend class flatten_view;

  public:
    using difference_type = range_difference_t<inner_range_type>;
    using size_type = range_size_t<inner_range_type>;
    using value_type = range_value_t<inner_range_type>;
    using reference = range_reference_t<inner_range_type>;

    __AGENCY_ANNOTATION
    flatten_view() = default;

    __AGENCY_ANNOTATION
    flatten_view(const flatten_view&) = default;

    // constructs a flatten_view over the given range of ranges
    template<class OtherRangeOfRanges,
             __AGENCY_REQUIRES(
               std::is_convertible<
                 experimental::all_t<OtherRangeOfRanges>,
                 all_t
               >::value
             )
            >
    __AGENCY_ANNOTATION
    flatten_view(OtherRangeOfRanges&& ranges)
      : segments_(all(ranges))
    {}

    // converting copy constructor
    template<class OtherRangeOfRanges,
             __AGENCY_REQUIRES(
               std::is_constructible<
                 all_t,
                 typename flatten_view<OtherRangeOfRanges>::all_t
               >::value
             )>
    __AGENCY_ANNOTATION
    flatten_view(const flatten_view<OtherRangeOfRanges>& other)
      : segments_(other.segments_)
    {}

  private:
    // recursively locates the segment containing element_idx, starting the
    // search at current_segment_idx
    __AGENCY_ANNOTATION
    reference bracket_operator(size_type element_idx, size_t current_segment_idx) const
    {
      auto& segment = segments_[current_segment_idx];
      auto size = segment.size();

      // if the element is within the current segment, return it
      // otherwise, recurse
      // note that attempting to index an element that lies beyond the end of this view
      // will not terminate the recursion
      return element_idx < size ?
        segment[element_idx] :
        bracket_operator(element_idx - size, current_segment_idx + 1);
    }

  public:
    __AGENCY_ANNOTATION
    reference operator[](size_type i) const
    {
      // seems like we have to do a linear search through the segments
      // so, it's not clear this can be computed in O(1)
      // OTOH, it's not O(N) either (N being the total number of elements viewed by this view)
      return bracket_operator(i, 0);
    }

    // total number of elements across all segments
    // (computed by summation, so linear in the number of segments)
    __AGENCY_ANNOTATION
    size_type size() const
    {
      size_type result = 0;
      for(auto& segment : segments_)
      {
        result += segment.size();
      }

      return result;
    }

    // A random access iterator over the flattened elements. It indexes back
    // into a stored copy of the owning view, so each dereference performs the
    // same segment search as flatten_view::operator[].
    class iterator
    {
      public:
        using value_type = typename flatten_view::value_type;
        using reference = typename flatten_view::reference;
        using difference_type = typename flatten_view::difference_type;
        using pointer = value_type*;
        using iterator_category = std::random_access_iterator_tag;

        // dereference
        __AGENCY_ANNOTATION
        reference operator*() const
        {
          return self_[current_position_];
        }

        // pre-increment
        __AGENCY_ANNOTATION
        iterator operator++()
        {
          ++current_position_;
          return *this;
        }

        // pre-decrement
        __AGENCY_ANNOTATION
        iterator operator--()
        {
          --current_position_;
          return *this;
        }

        // post-increment
        __AGENCY_ANNOTATION
        iterator operator++(int)
        {
          iterator result = *this;
          current_position_++;
          return result;
        }

        // post-decrement
        __AGENCY_ANNOTATION
        iterator operator--(int)
        {
          iterator result = *this;
          current_position_--;
          return result;
        }

        // add-assign
        __AGENCY_ANNOTATION
        iterator operator+=(size_type n)
        {
          current_position_ += n;
          return *this;
        }

        // minus-assign
        __AGENCY_ANNOTATION
        iterator operator-=(size_type n)
        {
          current_position_ -= n;
          return *this;
        }

        // add
        __AGENCY_ANNOTATION
        iterator operator+(size_type n)
        {
          iterator result = *this;
          result += n;
          return result;
        }

        // minus
        __AGENCY_ANNOTATION
        iterator operator-(size_type n)
        {
          iterator result = *this;
          result -= n;
          return result;
        }

        // bracket
        __AGENCY_ANNOTATION
        reference operator[](size_type n)
        {
          iterator tmp = *this + n;
          return *tmp;
        }

        // equal
        __AGENCY_ANNOTATION
        bool operator==(const iterator& rhs) const
        {
          // we assume that *this and rhs came from the same flattened_view,
          // so we do not compare their self_ members
          return current_position_ == rhs.current_position_;
        }

        // not equal
        __AGENCY_ANNOTATION
        bool operator!=(const iterator& rhs) const
        {
          return !(*this == rhs);
        }

        // difference
        __AGENCY_ANNOTATION
        difference_type operator-(const iterator& rhs) const
        {
          return current_position_ - rhs.current_position_;
        }

      private:
        friend flatten_view;

        __AGENCY_ANNOTATION
        iterator(size_type current_position, const flatten_view& self)
          : current_position_(current_position),
            self_(self)
        {}

        // XXX a more efficient implementation would track the current segment
        // XXX and the current position within the segment
        //     could keep an iterator to the current segment
        //     would make operator- and operator+= less efficient because they would involve linear searches
        size_type current_position_;

        // the iterator carries a copy of the view it traverses
        flatten_view self_;
    };

    __AGENCY_ANNOTATION
    iterator begin() const
    {
      return iterator(0, *this);
    }

    __AGENCY_ANNOTATION
    iterator end() const
    {
      return iterator(size(), *this);
    }

  private:
    all_t segments_;

  public:
    // returns a view of the i-th segment
    __AGENCY_ANNOTATION
    auto segment(size_type i) const
      -> decltype(all(this->segments_[i]))
    {
      return all(segments_[i]);
    }

    // returns the underlying view of segments
    __AGENCY_ANNOTATION
    const all_t& segments() const
    {
      return segments_;
    }
};

// creates a flatten_view of the given range of ranges
template<class RangeOfRanges>
__AGENCY_ANNOTATION
flatten_view<RangeOfRanges> flatten(RangeOfRanges&& ranges)
{
  return flatten_view<RangeOfRanges>(std::forward<RangeOfRanges>(ranges));
}
} // end experimental
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/experimental/ranges/range_traits.hpp>
#include <agency/experimental/ranges/all.hpp>
#include <agency/experimental/ranges/counted.hpp>
#include <agency/experimental/ranges/stride.hpp>
namespace agency
{
namespace experimental
{
namespace detail
{
// An iterator over contiguous "chunks" of an underlying view.
// Dereferencing yields a counted_view of the current chunk; the final
// chunk's length is clamped to the distance remaining to the view's end.
template<class View, class Difference = range_difference_t<View>>
class chunk_iterator
{
  public:
    using difference_type = Difference;

    // position advances through the underlying view in strides of chunk_size
    using base_iterator_type = stride_iterator<range_iterator_t<View>,Difference>;
    using base_sentinel_type = stride_sentinel<range_sentinel_t<View>>;

    using value_type = counted_view<range_iterator_t<View>,Difference>;
    using reference = value_type;

    // constructs from an iterator/sentinel pair and a chunk size
    template<class Iterator,
             class Sentinel,
             class = typename std::enable_if<
               std::is_constructible<base_iterator_type, Iterator, difference_type>::value &&
               std::is_constructible<base_sentinel_type, Sentinel>::value
             >::type
            >
    __AGENCY_ANNOTATION
    chunk_iterator(Iterator iter, Sentinel end, difference_type chunk_size)
      : current_position_(iter, chunk_size),
        end_(end)
    {}

    // constructs from a whole view and a chunk size
    __AGENCY_ANNOTATION
    chunk_iterator(View all, difference_type chunk_size)
      : chunk_iterator(all.begin(), all.end(), chunk_size)
    {}

    // the chunk size is the stride of the underlying stride_iterator
    __AGENCY_ANNOTATION
    difference_type chunk_size() const
    {
      return current_position_.stride();
    }

    // advances to the next chunk
    __AGENCY_ANNOTATION
    void operator++()
    {
      ++current_position_;
    }

    // advances by n chunks
    __AGENCY_ANNOTATION
    void operator+=(difference_type n)
    {
      current_position_ += n;
    }

    // returns the chunk i positions ahead of this one
    __AGENCY_ANNOTATION
    reference operator[](difference_type i) const
    {
      auto tmp = *this;
      tmp += i;
      return *tmp;
    }

    // returns a view of the current chunk
    __AGENCY_ANNOTATION
    value_type operator*() const
    {
      auto end_of_current_chunk = base();
      ++end_of_current_chunk;

      // the final chunk may be partial: size it by the distance to the
      // underlying view's end rather than by chunk_size()
      if(end_of_current_chunk == end())
      {
        auto size_of_last_chunk = end().base() - base().base();
        return value_type(base().base(), size_of_last_chunk);
      }

      return value_type(base().base(), chunk_size());
    }

    __AGENCY_ANNOTATION
    const base_iterator_type& base() const
    {
      return current_position_;
    }

    __AGENCY_ANNOTATION
    const base_sentinel_type& end() const
    {
      return end_;
    }

  private:
    base_iterator_type current_position_;
    base_sentinel_type end_;
};
// sentinel marking the end of a chunk_iterator's traversal
template<class View>
class chunk_sentinel
{
  public:
    using base_sentinel_type = typename chunk_iterator<View>::base_sentinel_type;

    __AGENCY_ANNOTATION
    chunk_sentinel(base_sentinel_type end)
      : end_(end)
    {}

    __AGENCY_ANNOTATION
    const base_sentinel_type& base() const
    {
      return end_;
    }

  private:
    base_sentinel_type end_;
};

// iterator/sentinel comparisons delegate to the underlying stride iterator & sentinel

template<class View, class Difference>
__AGENCY_ANNOTATION
bool operator==(const chunk_iterator<View,Difference>& lhs, const chunk_sentinel<View>& rhs)
{
  return lhs.base() == rhs.base();
}

template<class View, class Difference>
__AGENCY_ANNOTATION
bool operator!=(const chunk_iterator<View,Difference>& lhs, const chunk_sentinel<View>& rhs)
{
  return !(lhs == rhs);
}

template<class View, class Difference>
__AGENCY_ANNOTATION
bool operator!=(const chunk_sentinel<View> &lhs, const chunk_iterator<View,Difference>& rhs)
{
  return rhs != lhs;
}

// distance (in chunks) from rhs to the end of the traversal
template<class View, class Difference>
__AGENCY_ANNOTATION
typename chunk_iterator<View,Difference>::difference_type
  operator-(const chunk_sentinel<View>& lhs, const chunk_iterator<View,Difference>& rhs)
{
  return lhs.base() - rhs.base();
}
} // end detail
// A view of a range as a sequence of chunks, each (except possibly the
// last) containing n elements of the underlying range.
template<class Range, class Difference = range_difference_t<Range>>
class chunk_view
{
  public:
    using difference_type = Difference;

    __AGENCY_ANNOTATION
    chunk_view() = default;

    // views rng as chunks of n elements apiece
    __AGENCY_ANNOTATION
    chunk_view(Range rng, difference_type n)
      : begin_(all(rng), n)
    {}

  private:
    using all_t = agency::experimental::all_t<Range>;

  public:
    using iterator = detail::chunk_iterator<all_t, difference_type>;
    using value_type = typename iterator::value_type;

    // returns the i-th chunk
    __AGENCY_ANNOTATION
    value_type operator[](difference_type i) const
    {
      return begin()[i];
    }

    // returns a zero-length chunk anchored at the first chunk's beginning
    __AGENCY_ANNOTATION
    value_type empty_chunk() const
    {
      // XXX value_type happens to be an instantiation of counted_view
      value_type first_chunk = *begin();
      return value_type(first_chunk.begin(), difference_type(0));
    }

    // returns the i-th chunk, or an empty chunk when i is out of bounds
    __AGENCY_ANNOTATION
    value_type chunk_or_empty(difference_type i) const
    {
      return i < size() ? operator[](i) : empty_chunk();
    }

    __AGENCY_ANNOTATION
    iterator begin() const
    {
      return begin_;
    }

    using sentinel = detail::chunk_sentinel<all_t>;

    __AGENCY_ANNOTATION
    sentinel end() const
    {
      return sentinel(begin().end());
    }

    __AGENCY_ANNOTATION
    difference_type chunk_size() const
    {
      return begin().chunk_size();
    }

    // number of chunks: ceiling of (element count / chunk_size)
    __AGENCY_ANNOTATION
    difference_type size() const
    {
      return (end().base().base() - begin().base().base() + chunk_size() - 1) / (chunk_size());
    }

  private:
    iterator begin_;
}; // end chunk_view

// views rng in chunks of chunk_size elements apiece
template<class Range, class Difference>
__AGENCY_ANNOTATION
chunk_view<Range,Difference> chunk(Range&& rng, Difference chunk_size)
{
  return {std::forward<Range>(rng), chunk_size};
}

// views rng in number_of_chunks chunks of approximately equal size
// NOTE(review): number_of_chunks == 0 divides by zero below -- confirm that
// callers guarantee a positive chunk count
template<class Range, class Difference>
__AGENCY_ANNOTATION
auto chunk_evenly(Range&& rng, Difference number_of_chunks) ->
  decltype(
    chunk(std::forward<Range>(rng), std::declval<Difference>())
  )
{
  Difference chunk_size = (rng.size() + number_of_chunks - 1) / number_of_chunks;
  return chunk(std::forward<Range>(rng), chunk_size);
}
} // end experimental
} // end agency
<file_sep>#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/detail/type_traits.hpp>
#include <agency/detail/utility.hpp>
#include <agency/experimental/variant.hpp>
#include <agency/experimental/optional.hpp>
#include <exception>
#include <type_traits>
#include <future>
namespace agency
{
// declare always_ready_future for detail::always_ready_then below
template<class T>
class always_ready_future;
namespace detail
{
// declare always_ready_then() for always_ready_future::then() below
template<class T, class Function>
always_ready_future<detail::result_of_t<Function&&(T&)>>
always_ready_then(always_ready_future<T>& future, Function&& f);
template<class Function>
always_ready_future<detail::result_of_t<Function&&()>>
always_ready_then(always_ready_future<void>& future, Function&& f);
} // end detail
// always_ready_future is a future that is always created in a ready state
//
// Executors which always block their client can use always_ready_future as their
// associated future and still expose two-way asynchronous execution functions like async_execute()
template<class T>
class always_ready_future
{
  public:
    // constructs a ready future holding a copy of value
    always_ready_future(const T& value) : state_(value) {}

    // constructs a ready future by moving from value
    always_ready_future(T&& value) : state_(std::move(value)) {}

    // constructs a ready future holding an exception; get() will rethrow it
    always_ready_future(std::exception_ptr e) : state_(e) {}

    // move construction leaves other without a valid state
    always_ready_future(always_ready_future&& other)
      : state_()
    {
      detail::adl_swap(state_, other.state_);
    }

    always_ready_future& operator=(always_ready_future&& other)
    {
      state_.reset();
      detail::adl_swap(state_, other.state_);
      return *this;
    }

    template<class U,
             __AGENCY_REQUIRES(
               std::is_constructible<T,U&&>::value
             )>
    static always_ready_future make_ready(U&& value)
    {
      return always_ready_future(std::forward<U>(value));
    }

  private:
    // moves the value out of the variant, or rethrows the stored exception
    struct get_visitor
    {
      T operator()(T& value) const
      {
        return std::move(value);
      }

      T operator()(std::exception_ptr& e) const
      {
        // XXX fixed: rethrow the exception *contained in* e --
        //     "throw e" threw the std::exception_ptr object itself, so
        //     callers catching the original exception type never saw it
        std::rethrow_exception(e);

        // unreachable (rethrow_exception does not return)
        // XXX rework this visitor to avoid returning T in both cases
        return T();
      }
    };

  public:
    // Returns the contained value, or rethrows the contained exception.
    // Afterward, valid() is false.
    // Throws std::future_error when no state is present (e.g. after a move).
    T get()
    {
      if(!valid())
      {
        throw std::future_error(std::future_errc::no_state);
      }

      T result = experimental::visit(get_visitor(), *state_);

      // invalidate this future
      state_ = experimental::nullopt;

      return result;
    }

    void wait() const
    {
      // wait() is a no-op: this is always ready
    }

    bool valid() const
    {
      return state_.has_value();
    }

    // consumes this future's value and invokes f on it, returning an
    // always-ready future holding f's result (or the exception f threw)
    template<class Function>
    always_ready_future<detail::result_of_t<detail::decay_t<Function>(T&)>>
      then(Function&& f)
    {
      return detail::always_ready_then(*this, std::forward<Function>(f));
    }

  private:
    // disengaged optional <=> no state (invalid);
    // the variant holds either the value or an exception
    experimental::optional<experimental::variant<T,std::exception_ptr>> state_;
};
template<>
class always_ready_future<void>
{
  public:
    // constructs a ready, valueless future
    always_ready_future() : valid_(true) {}

    // constructs a ready future holding an exception; get() will rethrow it
    always_ready_future(std::exception_ptr e) : exception_(e), valid_(true) {}

    // move construction leaves other invalid
    always_ready_future(always_ready_future&& other)
      : exception_(), valid_(false)
    {
      detail::adl_swap(exception_, other.exception_);
      detail::adl_swap(valid_, other.valid_);
    }

    always_ready_future& operator=(always_ready_future&& other)
    {
      exception_.reset();
      valid_ = false;
      detail::adl_swap(exception_, other.exception_);
      detail::adl_swap(valid_, other.valid_);
      return *this;
    }

    static always_ready_future make_ready()
    {
      return always_ready_future();
    }

  public:
    // Invalidates this future and rethrows the contained exception, if any.
    // Throws std::future_error when no state is present (e.g. after a move).
    void get()
    {
      if(!valid())
      {
        throw std::future_error(std::future_errc::no_state);
      }

      valid_ = false;

      if(exception_)
      {
        // XXX fixed: rethrow the contained exception --
        //     "throw exception_.value()" threw the std::exception_ptr object
        //     itself rather than the exception it refers to
        std::rethrow_exception(exception_.value());
      }
    }

    void wait() const
    {
      // wait() is a no-op: this is always ready
    }

    bool valid() const
    {
      return valid_;
    }

    // invokes f after (trivially) waiting on this future
    template<class Function>
    always_ready_future<detail::result_of_t<detail::decay_t<Function>()>>
      then(Function&& f)
    {
      return detail::always_ready_then(*this, std::forward<Function>(f));
    }

  private:
    // engaged only when an exception was supplied at construction
    experimental::optional<std::exception_ptr> exception_;
    // distinguishes a valueless-but-valid future from a moved-from one
    bool valid_;
};
// creates an always_ready_future holding the given value
template<class T>
always_ready_future<detail::decay_t<T>> make_always_ready_future(T&& value)
{
  return always_ready_future<detail::decay_t<T>>(std::forward<T>(value));
}

// creates a valueless, ready always_ready_future<void>
inline always_ready_future<void> make_always_ready_future()
{
  return always_ready_future<void>();
}

// creates an always_ready_future whose get() rethrows the exception in e
template<class T>
always_ready_future<T> make_always_ready_exceptional_future(std::exception_ptr e)
{
  return always_ready_future<T>(e);
}
// Invokes f(args...) immediately and wraps its result in an
// always_ready_future; if f throws, the exception is captured inside the
// returned future rather than propagating.
// This overload handles non-void results.
template<class Function,
         class... Args,
         __AGENCY_REQUIRES(
           !std::is_void<detail::result_of_t<Function&&(Args&&...)>>::value
         )>
always_ready_future<
  detail::result_of_t<Function&&(Args&&...)>
>
  try_make_always_ready_future(Function&& f, Args&&... args)
{
  try
  {
    return make_always_ready_future(std::forward<Function>(f)(std::forward<Args>(args)...));
  }
  catch(...)
  {
    using result_type = detail::result_of_t<Function&&(Args&&...)>;
    return make_always_ready_exceptional_future<result_type>(std::current_exception());
  }
}

// as above, for functions returning void
template<class Function,
         class... Args,
         __AGENCY_REQUIRES(
           std::is_void<detail::result_of_t<Function&&(Args&&...)>>::value
         )>
always_ready_future<void> try_make_always_ready_future(Function&& f, Args&&... args)
{
  try
  {
    std::forward<Function>(f)(std::forward<Args>(args)...);
    return make_always_ready_future();
  }
  catch(...)
  {
    return make_always_ready_exceptional_future<void>(std::current_exception());
  }
}
namespace detail
{
// implements always_ready_future<T>::then(): eagerly consumes the
// predecessor's value and invokes f on it
// NOTE(review): if future.get() rethrows a stored exception, it propagates
// out of then() rather than being captured in the returned future --
// confirm that this is intended
template<class T, class Function>
always_ready_future<detail::result_of_t<Function&&(T&)>>
  always_ready_then(always_ready_future<T>& future, Function&& f)
{
  auto argument = future.get();
  return agency::try_make_always_ready_future(std::forward<Function>(f), argument);
}

// void overload: consume (and invalidate) the predecessor, then invoke f
template<class Function>
always_ready_future<detail::result_of_t<Function&&()>>
  always_ready_then(always_ready_future<void>& future, Function&& f)
{
  future.get();
  return agency::try_make_always_ready_future(std::forward<Function>(f));
}
} // end detail
} // end agency
<file_sep>This is the top-level directory of Agency's test programs.
# Building and Running Test Programs
Each test program is built from a single source file. To build a test program by hand, compile a source file with a C++11 or better compiler. For example, the following command builds the `bulk_invoke/multiple_results.cpp` source file from the `testing` directory:
$ clang -I.. -std=c++11 -lstdc++ -pthread bulk_invoke/multiple_results.cpp
CUDA C++ source (`.cu` files) should be built with the NVIDIA compiler (`nvcc`). Include the `--expt-extended-lambda` option:
$ nvcc -I.. -std=c++11 --expt-extended-lambda bulk_invoke/cuda/multiple_results.cu
## Automated Builds
To build the test programs automatically, run the following command from this directory:
$ scons
To accelerate the build process, run the following command to run 8 jobs in parallel:
$ scons -j8
To build *and* run the test programs, specify `run_tests` as a command line argument:
$ scons -j8 run_tests
To build all tests underneath a particular subdirectory, run `scons` with the path to the subdirectory of interest as a command line argument.
For example, the following command builds all of the test programs underneath the `executor_traits` subdirectory:
$ scons executor_traits
Likewise, the following command will build *and* run the test programs underneath the `executor_traits` subdirectory:
$ scons executor_traits/run_tests
# Build System Structure
The top-level directory named 'testing' contains a single `SConstruct` file. This file contains definitions of common functionality used by the rest of the build system.
After setting up a SCons build environment, the `SConstruct` sets up a hierarchical build by invoking the top-level `SConscript` files in the root directory.
The top-level `SConscript` file calls the `RecursivelyCreateProgramsAndUnitTestAliases()` method to recursively traverse the directory tree and create a program and unit test from each source file.
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/execution/executor/flattened_executor.hpp>
#include <agency/cuda/execution/executor/grid_executor.hpp>
#include <agency/cuda/memory/allocator.hpp>
#include <agency/cuda/memory/resource/pinned_resource.hpp>
#include <agency/experimental/ndarray/ndarray.hpp>
#include <agency/cuda/future.hpp>
namespace agency
{
namespace cuda
{
namespace this_thread
{
// An executor whose agents are permitted to execute in parallel but which
// here runs them sequentially, in index order, on the calling thread.
class parallel_executor
{
  public:
    using execution_category = parallel_execution_tag;

    // allocate through pinned_resource so memory is reachable by both host and device
    template<class T>
    using allocator = cuda::allocator<T, pinned_resource>;

    template<class T>
    using future = cuda::future<T>;

    // Invokes f(i, result, shared) for each i in [0, n) on the calling thread.
    template<class Function, class ResultFactory, class SharedFactory>
    __AGENCY_ANNOTATION
    agency::detail::result_of_t<ResultFactory()>
      bulk_sync_execute(Function f, size_t n, ResultFactory result_factory, SharedFactory shared_factory)
    {
      auto result = result_factory();
      auto shared_arg = shared_factory();

      for(size_t i = 0; i < n; ++i)
      {
        f(i, result, shared_arg);
      }

      // return by name: enables copy elision while still implicitly moving
      // move-only result types (std::move(result) here would inhibit NRVO)
      return result;
    }
};
} // end this_thread
// outside of this_thread, cuda::parallel_executor adapts grid_executor via
// flattened_executor
using parallel_executor = flattened_executor<grid_executor>;
} // end cuda
} // end agency
<file_sep>// Copyright (c) 2017, NVIDIA CORPORATION. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// * Neither the name of NVIDIA CORPORATION nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <stddef.h> // XXX instead of <cstddef> to WAR clang issue
#include <type_traits>
#include <utility>
namespace agency
{
namespace detail
{
// trait: true when tuple_leaf_base may apply the empty base class optimization
// (EBCO) to T, i.e. T is empty and — when the compiler supports C++14's
// is_final — not marked final (a final class cannot be a base)
template<class T>
struct tuple_use_empty_base_class_optimization
: std::integral_constant<
bool,
std::is_empty<T>::value
#if __cplusplus >= 201402L
&& !std::is_final<T>::value
#endif
>
{};
// general case: the leaf stores its element T as a data member
// (the defaulted bool parameter selects the EBCO specialization below when possible)
template<class T, bool = tuple_use_empty_base_class_optimization<T>::value>
class tuple_leaf_base
{
public:
__agency_exec_check_disable__
__AGENCY_ANNOTATION
tuple_leaf_base() = default;
// constructs the stored value from arg (perfect-forwarded)
__agency_exec_check_disable__
template<class U>
__AGENCY_ANNOTATION
tuple_leaf_base(U&& arg) : val_(std::forward<U>(arg)) {}
__agency_exec_check_disable__
__AGENCY_ANNOTATION
~tuple_leaf_base() = default;
// read-only access to the stored element
__AGENCY_ANNOTATION
const T& const_get() const
{
return val_;
}
// mutable access to the stored element
__AGENCY_ANNOTATION
T& mutable_get()
{
return val_;
}
private:
T val_;
};
// EBCO case: an empty, non-final T becomes a base class so the leaf
// contributes no storage of its own
template<class T>
class tuple_leaf_base<T,true> : public T
{
public:
__AGENCY_ANNOTATION
tuple_leaf_base() = default;
template<class U>
__AGENCY_ANNOTATION
tuple_leaf_base(U&& arg) : T(std::forward<U>(arg)) {}
// the "stored element" is the base subobject itself
__AGENCY_ANNOTATION
const T& const_get() const
{
return *this;
}
__AGENCY_ANNOTATION
T& mutable_get()
{
return *this;
}
};
// tuple_leaf holds the Ith element (of type T) of a tuple implementation;
// the index I distinguishes leaves whose element types are identical
template<std::size_t I, class T>
class tuple_leaf : public tuple_leaf_base<T>
{
private:
using super_t = tuple_leaf_base<T>;
public:
__AGENCY_ANNOTATION
tuple_leaf() = default;
// converting constructor from a value convertible to T
template<class U,
__AGENCY_REQUIRES(
std::is_constructible<T,U>::value
)>
__AGENCY_ANNOTATION
tuple_leaf(U&& arg) : super_t(std::forward<U>(arg)) {}
__AGENCY_ANNOTATION
tuple_leaf(const tuple_leaf& other) : super_t(other.const_get()) {}
__AGENCY_ANNOTATION
tuple_leaf(tuple_leaf&& other) : super_t(std::forward<T>(other.mutable_get())) {}
// converting copy-constructor from a leaf holding a different element type
template<class U,
__AGENCY_REQUIRES(
std::is_constructible<T,const U&>::value
)>
__AGENCY_ANNOTATION
tuple_leaf(const tuple_leaf<I,U>& other) : super_t(other.const_get()) {}
// converting move-constructor
// note the use of std::forward<U> here to allow construction of T from U&&
template<class U,
__AGENCY_REQUIRES(
std::is_constructible<T,U&&>::value
)>
__AGENCY_ANNOTATION
tuple_leaf(tuple_leaf<I,U>&& other) : super_t(std::forward<U>(other.mutable_get())) {}
// converting copy-assignment from a leaf holding a different element type
__agency_exec_check_disable__
template<class U,
__AGENCY_REQUIRES(
std::is_assignable<T,U>::value
)>
__AGENCY_ANNOTATION
tuple_leaf& operator=(const tuple_leaf<I,U>& other)
{
this->mutable_get() = other.const_get();
return *this;
}
__agency_exec_check_disable__
__AGENCY_ANNOTATION
tuple_leaf& operator=(const tuple_leaf& other)
{
this->mutable_get() = other.const_get();
return *this;
}
__agency_exec_check_disable__
__AGENCY_ANNOTATION
tuple_leaf& operator=(tuple_leaf&& other)
{
this->mutable_get() = std::forward<T>(other.mutable_get());
return *this;
}
// converting move-assignment from a leaf holding a different element type
__agency_exec_check_disable__
template<class U,
__AGENCY_REQUIRES(
std::is_assignable<T,U&&>::value
)>
__AGENCY_ANNOTATION
tuple_leaf& operator=(tuple_leaf<I,U>&& other)
{
this->mutable_get() = std::forward<U>(other.mutable_get());
return *this;
}
// swaps this leaf's element with other's;
// returns int (always 0) so results can be expanded into a variadic "swallow" call
__agency_exec_check_disable__
__AGENCY_ANNOTATION
int swap(tuple_leaf& other)
{
using std::swap;
swap(this->mutable_get(), other.mutable_get());
return 0;
}
};
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/future/future_traits/is_future.hpp>
#include <agency/future/future_traits/future_rebind_value.hpp>
#include <agency/future.hpp>
#include <type_traits>
namespace agency
{
namespace detail
{
namespace future_cast_detail
{
// two Futures are of the same "kind" if we can rebind the value of one using the other's value
// and we get the same type as the other
// e.g., std::future<T> and std::future<U> are the same kind of future
template<class Future1, class Future2>
struct is_same_kind_of_future
: std::is_same<
Future1,
// rebind Future2's value type to Future1's; if the result is Future1,
// both futures come from the same future template
future_rebind_value_t<
Future2,
future_value_t<Future1>
>
>
{};
// this version of future_cast_impl() handles the case
// when we are only casting the value type of a future, and not
// changing the kind of future we are dealing with
// in other words, we are casting from e.g. std::future<T> -> std::future<U>
template<class ToFuture, class FromFuture,
__AGENCY_REQUIRES(is_same_kind_of_future<ToFuture,FromFuture>::value)
>
__AGENCY_ANNOTATION
ToFuture future_cast_impl(FromFuture& from_future)
{
using to_value_type = future_value_t<ToFuture>;
// we can use future_traits in this case
// (only the value type changes, so the future's own cast suffices)
return future_traits<FromFuture>::template cast<to_value_type>(from_future);
}
// continuation functor: waits on (or gets from) the captured future and
// converts its value to ToValue; used when changing the kind of future
template<class FromFuture, class ToValue>
struct future_cast_functor
{
// mutable because wait()/get() are non-const on future types
mutable FromFuture from_future;
// this handles the case when from_future is a void future
// this only makes sense when ToValue is also void
template<class FromValue = future_value_t<FromFuture>,
class ToValue1 = ToValue,
__AGENCY_REQUIRES(std::is_void<FromValue>::value),
__AGENCY_REQUIRES(std::is_void<ToValue1>::value)
>
__AGENCY_ANNOTATION
void operator()() const
{
from_future.wait();
}
// this handles the case when from_future is a non-void future
template<class FromValue = future_value_t<FromFuture>,
__AGENCY_REQUIRES(!std::is_void<FromValue>::value)
>
__AGENCY_ANNOTATION
ToValue operator()() const
{
return static_cast<ToValue>(from_future.get());
}
};
// this version of future_cast_impl() handles the case
// when we are casting the kind of future and possibly the value type as well
// in other words, we are casting from e.g. std::future<T> -> my_future<U>
__agency_exec_check_disable__
template<class ToFuture, class FromFuture,
__AGENCY_REQUIRES(!is_same_kind_of_future<ToFuture,FromFuture>::value)
>
__AGENCY_ANNOTATION
ToFuture future_cast_impl(FromFuture& from_future)
{
// create a ready void future of the same kind as ToFuture
auto ready = future_traits<ToFuture>::make_ready();
using to_value_type = future_value_t<ToFuture>;
// create a continuation to wait on from_future & cast its result
// (the functor takes ownership of from_future)
return agency::future_traits<decltype(ready)>::then(ready, future_cast_functor<FromFuture, to_value_type>{std::move(from_future)});
}
} // end future_cast_detail
// casts from_future to a ToFuture; requires both types to be futures and the
// source value type to be convertible to the destination value type.
// dispatches on whether the futures are the same kind (cheap traits cast)
// or different kinds (continuation-based conversion)
template<class ToFuture, class FromFuture,
__AGENCY_REQUIRES(is_future<ToFuture>::value && is_future<FromFuture>::value),
__AGENCY_REQUIRES(std::is_convertible<future_value_t<FromFuture>, future_value_t<ToFuture>>::value)
>
__AGENCY_ANNOTATION
ToFuture future_cast(FromFuture& from_future)
{
return future_cast_detail::future_cast_impl<ToFuture>(from_future);
}
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/detail/type_traits.hpp>
#include <agency/execution/executor/executor_traits.hpp>
#include <agency/execution/executor/customization_points/bulk_async_execute.hpp>
namespace agency
{
// overload for executors which provide their own bulk_sync_execute() member:
// forward directly to the executor
__agency_exec_check_disable__
template<class E, class Function, class ResultFactory, class... Factories,
__AGENCY_REQUIRES(detail::BulkSynchronousExecutor<E>()),
__AGENCY_REQUIRES(executor_execution_depth<E>::value == sizeof...(Factories))
>
__AGENCY_ANNOTATION
detail::result_of_t<ResultFactory()>
bulk_sync_execute(E& exec, Function f, executor_shape_t<E> shape, ResultFactory result_factory, Factories... shared_factories)
{
return exec.bulk_sync_execute(f, shape, result_factory, shared_factories...);
}
// overload for bulk executors without a native bulk_sync_execute():
// launch asynchronously, then block on the resulting future to get sync semantics
__agency_exec_check_disable__
template<class E, class Function, class ResultFactory, class... Factories,
__AGENCY_REQUIRES(detail::BulkExecutor<E>() && !detail::BulkSynchronousExecutor<E>()),
__AGENCY_REQUIRES(executor_execution_depth<E>::value == sizeof...(Factories))
>
__AGENCY_ANNOTATION
detail::result_of_t<ResultFactory()>
bulk_sync_execute(E& exec, Function f, executor_shape_t<E> shape, ResultFactory result_factory, Factories... shared_factories)
{
return agency::bulk_async_execute(exec, f, shape, result_factory, shared_factories...).get();
}
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/execution/executor/executor_traits.hpp>
#include <agency/execution/executor/detail/utility/bulk_sync_execute_with_auto_result_and_without_shared_parameters.hpp>
#include <agency/execution/executor/detail/utility/executor_bulk_result.hpp>
#include <agency/future.hpp>
namespace agency
{
namespace detail
{
// function object used by bulk_share_future(): for every agent index it
// produces a fresh copy of the captured shared future
template<class SharedFuture>
class bulk_share_future_functor
{
private:
SharedFuture sf_;
public:
__agency_exec_check_disable__
__AGENCY_ANNOTATION
bulk_share_future_functor(const SharedFuture& sf)
: sf_(sf)
{}
// copy ctor delegates so the copy goes through the exec_check_disable'd ctor above
__AGENCY_ANNOTATION
bulk_share_future_functor(const bulk_share_future_functor& other)
: bulk_share_future_functor(other.sf_)
{}
__agency_exec_check_disable__
__AGENCY_ANNOTATION
~bulk_share_future_functor()
{}
// ignores the agent index and returns a copy of the stored shared future
__agency_exec_check_disable__
template<class Index>
__AGENCY_ANNOTATION
SharedFuture operator()(const Index&) const
{
return sf_;
}
};
// shares f once and returns a container holding one copy of the resulting
// shared future for each point in shape
__agency_exec_check_disable__
template<class E, class Future,
__AGENCY_REQUIRES(Executor<E>())
>
__AGENCY_ANNOTATION
executor_bulk_result_t<E, typename future_traits<Future>::shared_future_type>
bulk_share_future(E& exec, executor_shape_t<E> shape, Future& f)
{
using shared_future_type = typename future_traits<Future>::shared_future_type;
// explicitly share f once to get things started
shared_future_type shared_f = future_traits<Future>::share(f);
// bulk execute a function that returns copies of shared_f
return bulk_sync_execute_with_auto_result_and_without_shared_parameters(exec, bulk_share_future_functor<shared_future_type>(shared_f), shape);
}
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/execution/executor/executor_traits/is_bulk_executor.hpp>
#include <agency/container/bulk_result.hpp>
#include <agency/detail/type_traits.hpp>
namespace agency
{
namespace detail
{
// metafunction: the container type a bulk executor uses to collect results of type T.
// SFINAE-friendly — the primary template defines no ::type when Executor is not a bulk executor
template<class Executor, class T, bool Enable = is_bulk_executor<Executor>::value>
struct executor_bulk_result {};
template<class Executor, class T>
struct executor_bulk_result<Executor,T,true>
{
using type = bulk_result<
T,
executor_shape_t<Executor>,
executor_allocator_t<Executor,T>
>;
};
template<class Executor, class T>
using executor_bulk_result_t = typename executor_bulk_result<Executor,T>::type;
} // end detail
} // end agency
<file_sep>#include <agency/agency.hpp>
#include <agency/execution/executor/detail/utility.hpp>
#include <iostream>
#include "../test_executors.hpp"
// Exercises bulk_sync_execute_with_collected_result(): each agent reads
// element idx of the shared vector (filled with 13), so the collected
// result must equal that shared vector.
template<class Executor>
void test(Executor exec)
{
  using index_type = agency::executor_index_t<Executor>;

  size_t n = 10;

  auto read_shared_element = [](index_type idx, std::vector<int>& shared_arg)
  {
    return shared_arg[idx];
  };

  auto make_results    = [=]{ return std::vector<int>(n); };      // results
  auto make_shared_arg = [=]{ return std::vector<int>(n, 13); };  // shared_arg

  auto collected = agency::detail::bulk_sync_execute_with_collected_result(exec, read_shared_element, n, make_results, make_shared_arg);

  assert(collected == std::vector<int>(n, 13));
}
int main()
{
// run the test against every flavor of executor provided by test_executors.hpp
test(bulk_synchronous_executor());
test(bulk_asynchronous_executor());
test(bulk_continuation_executor());
test(not_a_bulk_synchronous_executor());
test(not_a_bulk_asynchronous_executor());
test(not_a_bulk_continuation_executor());
test(complete_bulk_executor());
std::cout << "OK" << std::endl;
return 0;
}
<file_sep>Agency v0.2.0 Changelog
=======================
## Summary
TODO
### Breaking Changes
* `cuda::split_allocator` has been renamed `cuda::heterogeneous_allocator`
## New Features
### New Control Structures
* `async`
* `invoke`
### New Execution Policies
* `concurrent_execution_policy_2d`
* `sequenced_execution_policy_2d`
* `parallel_execution_policy_2d`
* `unsequenced_execution_policy_2d`
* OpenMP-specific execution policies
* `omp::parallel_execution_policy`
* `omp::unsequenced_execution_policy`
### New Executors
* `cuda::concurrent_grid_executor`
* `omp::parallel_for_executor` AKA `omp::parallel_executor`
* `omp::simd_executor` AKA `omp::unsequenced_executor`
* `experimental::unrolling_executor`
* `variant_executor`
### New Utilities
* `array`
* `tuple`
* `shared`
* `shared_array`
* `shared_vector`
* `vector`
* `cuda::device`
* `cuda::devices`
* `cuda::all_devices`
### New Experimental Execution Policies
* `cuda::experimental::static_grid`
* `cuda::experimental::static_con`
### New Experimental Utilities
* `experimental::ndarray`
* `experimental::ndarray_ref`
* `cuda::experimental::make_async_future`
* `cuda::experimental::native_handle`
* New fancy ranges
* `experimental::interval()`
* `experimental::iota_view`
* `experimental::transformed_view`
* `experimental::zip_with_view`
### New Examples
* [`fork_executor.cpp`](../0.2.0/examples/fork_executor.cpp)
## Resolved Issues
* [#347](../../issues/347) Various warnings at aggressive reporting levels have been eliminated
* [#289](../../issues/289) `async_future::bulk_then()` needs to schedule the `outer_arg`'s destruction
* [#352](../../issues/352) .rank() generates results in the wrong order
Agency v0.1.0 Changelog
=======================
## Summary
Agency 0.1.0 introduces new **control structures** such as `bulk_invoke()` for creating parallel tasks. A suite of new **execution policies** compose with these control structures to require different kinds of semantic guarantees from the created tasks. A new library of **executors** controls the mapping of tasks onto underlying execution resources such as CPUs, GPUs, and collections of multiple GPUs. In addition to these basic components, this release also introduces experimental support for a collection of utility types useful for creating Agency programs.
## New Features
### New Control Structures
* `bulk_invoke`
* `bulk_async`
* `bulk_then`
### New Execution Policies
* `concurrent_execution_policy`
* `sequenced_execution_policy`
* `parallel_execution_policy`
* `unsequenced_execution_policy`
* CUDA-specific execution policies
* `cuda::concurrent_execution_policy`
* `cuda::parallel_execution_policy`
* `cuda::grid`
### New Executors
* `concurrent_executor`
* `executor_array`
* `flattened_executor`
* `parallel_executor`
* `scoped_executor`
* `sequenced_executor`
* `unsequenced_executor`
* `vector_executor`
* CUDA-specific executors
* `cuda::block_executor`
* `cuda::concurrent_executor`
* `cuda::grid_executor`
* `cuda::grid_executor_2d`
* `cuda::multidevice_executor`
* `cuda::parallel_executor`
### New Experimental Utilities
* `experimental::array`
* `experimental::bounded_integer`
* `experimental::optional`
* `experimental::short_vector`
* `experimental::span`
* Fancy ranges based on the [range-v3](http://github.com/ericniebler/range-v3) library
* `experimental::chunk_view`
* `experimental::counted_view`
* `experimental::stride_view`
* `experimental::zip_view`
### New Examples
* [`concurrent_ping_pong.cpp`](../0.1.0/examples/concurrent_ping_pong.cpp)
* [`concurrent_sum.cpp`](../0.1.0/examples/concurrent_sum.cpp)
* [`fill.cpp`](../0.1.0/examples/fill.cpp)
* [`hello_async.cpp`](../0.1.0/examples/hello_async.cpp)
* [`hello_lambda.cpp`](../0.1.0/examples/hello_lambda.cpp)
* [`hello_then.cpp`](../0.1.0/examples/hello_then.cpp)
* [`hello_world.cpp`](../0.1.0/examples/hello_world.cpp)
* [`ping_pong_tournament.cpp`](../0.1.0/examples/ping_pong_tournament.cpp)
* [`saxpy.cpp`](../0.1.0/examples/saxpy.cpp)
* [`version.cpp`](../0.1.0/examples/version.cpp)
* CUDA-specific example programs
* [`async_reduce.cu`](../0.1.0/examples/cuda/async_reduce.cu)
* [`black_scholes.cu`](../0.1.0/examples/cuda/black_scholes.cu)
* [`hello_device_lambda.cu`](../0.1.0/examples/cuda/hello_device_lambda.cu)
* [`multigpu_saxpy.cu`](../0.1.0/examples/cuda/multigpu_saxpy.cu)
* [`saxpy.cu`](../0.1.0/examples/cuda/saxpy.cu)
* [`simple_on.cu`](../0.1.0/examples/cuda/simple_on.cu)
* [`transpose.cu`](../0.1.0/examples/cuda/transpose.cu)
## Known Issues
* [#255](../../issues/255) Agency is not known to work with any version of the Microsoft Compiler
* [#256](../../issues/256) Agency is not known to work with NVIDIA Compiler versions prior to 8.0
* [#257](../../issues/257) Agency is not known to work with NVIDIA GPU architectures prior to `sm_3x`
## Acknowledgments
* Thanks to <NAME> for significant input into Agency's overall design.
* Thanks to the NVIDIA compiler team, especially <NAME>, for enhancements to `nvcc`'s C++ support.
* Thanks to <NAME>, <NAME>, and <NAME> for testing this release during development.
* Thanks to <NAME> and <NAME> for design feedback.
* Thanks to <NAME> for contributing an implementation of synchronic.
Agency v0.0.0 Changelog
=======================
## Summary
This version of Agency was not released.
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/future.hpp>
#include <agency/execution/executor/customization_points/then_execute.hpp>
#include <agency/execution/executor/executor_traits/executor_future.hpp>
#include <agency/execution/executor/executor_traits/is_executor.hpp>
#include <utility>
#include <type_traits>
namespace agency
{
namespace detail
{
// detects whether Executor has a member .future_cast<T>(Future&) whose
// result type is exactly executor_future_t<Executor,T>
template<class T, class Executor, class Future>
struct has_future_cast_impl
{
using expected_future_type = executor_future_t<Executor, T>;
template<
class Executor2,
class Result = decltype(
std::declval<Executor2&>().template future_cast<T>(std::declval<Future&>())
),
__AGENCY_REQUIRES(std::is_same<expected_future_type,Result>::value)
>
static std::true_type test(int);
// fallback: selected when the expression above is ill-formed
template<class>
static std::false_type test(...);
using type = decltype(test<Executor>(0));
};
template<class T, class Executor, class Future>
using has_future_cast = typename has_future_cast_impl<T,Executor,Future>::type;
// this overload handles executors which have .future_cast()
__agency_exec_check_disable__
template<class T, class E, class Future,
__AGENCY_REQUIRES(has_future_cast<T,E,Future>::value)
>
__AGENCY_ANNOTATION
executor_future_t<E,T> future_cast_impl(E& exec, Future& fut)
{
// defer to the executor's own member function
return exec.template future_cast<T>(fut);
}
// detects whether future_traits<FromFuture>::cast<...>() yields exactly ToFuture
template<class FromFuture, class ToFuture>
struct is_future_castable_impl
{
using from_value_type = typename agency::future_traits<FromFuture>::value_type;
using to_value_type = typename agency::future_traits<ToFuture>::value_type;
using cast_type = decltype(
agency::future_traits<FromFuture>::template cast<to_value_type>(std::declval<FromFuture&>())
);
using type = std::is_same<cast_type, ToFuture>;
};
template<class FromFuture, class ToFuture>
using is_future_castable = typename is_future_castable_impl<FromFuture,ToFuture>::type;
// this overload handles executors which do not have .future_cast()
// and when future_traits::cast() may be used
template<class T, class E, class Future,
__AGENCY_REQUIRES(detail::Executor<E>()),
__AGENCY_REQUIRES(!has_future_cast<T,E,Future>::value),
__AGENCY_REQUIRES(is_future_castable<Future, executor_future_t<E,T>>::value)
>
__AGENCY_ANNOTATION
executor_future_t<E,T> future_cast_impl(E&, Future& fut)
{
// the executor is not needed here; future_traits performs the cast directly
return future_traits<Future>::template cast<T>(fut);
}
// continuation which converts a predecessor future's value to T
template<class T>
struct future_cast_functor
{
// cast from U -> T
template<class U>
__AGENCY_ANNOTATION
T operator()(U& arg) const
{
return static_cast<T>(std::move(arg));
}
// cast from void -> void
// T would be void in this case
__AGENCY_ANNOTATION
T operator()() const
{
}
};
// this overload handles executors which do not have .future_cast(),
// and when future_traits::cast() may not be used
// in this case, we create a continuation to execute the conversion
template<class T, class E, class Future,
__AGENCY_REQUIRES(detail::Executor<E>()),
__AGENCY_REQUIRES(!has_future_cast<T,E,Future>::value),
__AGENCY_REQUIRES(!is_future_castable<Future, executor_future_t<E,T>>::value)
>
__AGENCY_ANNOTATION
executor_future_t<E,T> future_cast_impl(E& exec, Future& fut)
{
// schedule a continuation on exec which performs the conversion
return agency::then_execute(exec, future_cast_functor<T>(), fut);
}
} // end detail
// customization point: converts fut into the executor's preferred future type
// holding a T, dispatching to the best available implementation
__agency_exec_check_disable__
template<class T, class E, class Future>
__AGENCY_ANNOTATION
executor_future_t<E,T> future_cast(E& exec, Future& fut)
{
return detail::future_cast_impl<T>(exec, fut);
} // end future_cast()
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/container/array.hpp>
#include <agency/experimental/span.hpp>
#include <agency/detail/type_traits.hpp>
#include <utility>
namespace agency
{
namespace experimental
{
// XXX this is only valid for contiguous containers
// returns a mutable view over all of c's elements
template<class Container>
__AGENCY_ANNOTATION
span<typename Container::value_type> all(Container& c)
{
  using element_type = typename Container::value_type;
  return span<element_type>(c);
}
// XXX this is only valid for contiguous containers
// returns a read-only view over all of c's elements
template<class Container>
__AGENCY_ANNOTATION
span<const typename Container::value_type> all(const Container& c)
{
  using element_type = const typename Container::value_type;
  return span<element_type>(c);
}
// XXX maybe should put this in array.hpp
// returns a fixed-extent mutable view over the whole array
template<class T, std::size_t N>
__AGENCY_ANNOTATION
span<T,N> all(array<T,N>& a)
{
  using view_type = span<T,N>;
  return view_type(a);
}
// XXX maybe should put this in array.hpp
// returns a fixed-extent read-only view over the whole array
template<class T, std::size_t N>
__AGENCY_ANNOTATION
span<const T,N> all(const array<T,N>& a)
{
  using view_type = span<const T,N>;
  return view_type(a);
}
// spans are already views, so don't wrap them
// XXX maybe should put this in span.hpp
template<class ElementType, std::ptrdiff_t Extent>
__AGENCY_ANNOTATION
span<ElementType,Extent> all(span<ElementType,Extent> s)
{
// identity overload: return the span unchanged
return s;
}
// the view type produced by calling all() on a Range
// note the deliberate use of ADL when calling all() here
template<class Range>
using all_t = agency::detail::decay_t<decltype(all(std::declval<Range>()))>;
} // end experimental
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/cuda/detail/feature_test.hpp>
#include <agency/cuda/detail/terminate.hpp>
#include <agency/cuda/execution/detail/kernel/kernel.hpp>
#include <agency/cuda/execution/detail/kernel/launch_kernel.hpp>
#include <agency/cuda/detail/future/stream.hpp>
#include <agency/cuda/device.hpp>
namespace agency
{
namespace cuda
{
namespace detail
{
// RAII wrapper pairing a cudaEvent_t with the stream it was recorded on.
// An event with a zero native handle (e_ == 0) is "invalid". Events are
// move-only; the destructor destroys the underlying CUDA event.
class event
{
public:
// tag type selecting the "construct an already-ready event" constructor
struct construct_ready_t {};
static constexpr construct_ready_t construct_ready{};
private:
// timing is disabled because these events are used only for synchronization
static constexpr int event_create_flags = cudaEventDisableTiming;
// creates (but does not record) an event associated with the given stream
__host__ __device__
event(construct_ready_t, detail::stream&& s)
: stream_(std::move(s))
{
#if __cuda_lib_has_cudart
// switch to the stream's device when creating the event
scoped_current_device scope(stream().device());
detail::throw_on_error(cudaEventCreateWithFlags(&e_, event_create_flags), "cudaEventCreateWithFlags in cuda::detail::event ctor");
#else
detail::terminate_with_message("cuda::detail::event ctor requires CUDART");
#endif
}
public:
// constructs a new event recorded on the given stream
__host__ __device__
event(detail::stream&& s) : event(construct_ready, std::move(s))
{
#if __cuda_lib_has_cudart
detail::throw_on_error(cudaEventRecord(e_, stream().native_handle()), "cudaEventRecord in cuda::detail::event ctor");
#else
detail::terminate_with_message("cuda::detail::event ctor requires CUDART");
#endif
}
// creates an invalid event
__host__ __device__
event() : stream_(), e_{0}
{}
// creates a valid event with no recorded work (immediately ready)
__host__ __device__
event(construct_ready_t)
: event(construct_ready, detail::stream())
{}
__host__ __device__
event(const event&) = delete;
// move construction: steal other's state, leaving other invalid
__host__ __device__
event(event&& other)
: event()
{
swap(other);
}
__host__ __device__
~event()
{
if(valid())
{
destroy_event();
}
}
// move assignment via swap; the old state is destroyed with other
__host__ __device__
event& operator=(event&& other)
{
swap(other);
return *this;
}
// true when this event owns a CUDA event
__host__ __device__
bool valid() const
{
return e_ != 0;
}
// non-blocking query: true when the recorded work has completed
__host__ __device__
bool is_ready() const
{
if(valid())
{
#if __cuda_lib_has_cudart
cudaError_t result = cudaEventQuery(e_);
if(result != cudaErrorNotReady && result != cudaSuccess)
{
detail::throw_on_error(result, "cudaEventQuery in cuda::detail::event::is_ready");
}
return result == cudaSuccess;
#else
detail::terminate_with_message("cuda::detail::event::is_ready requires CUDART");
#endif
}
return false;
}
// blocks until the event is complete; returns the CUDA error code instead of throwing
// (on device, there is no per-event wait, so the whole device is synchronized)
__host__ __device__
cudaError_t wait_and_return_cuda_error() const
{
#if __cuda_lib_has_cudart
# ifndef __CUDA_ARCH__
return cudaEventSynchronize(e_);
# else
return cudaDeviceSynchronize();
# endif // __CUDA_ARCH__
#else
return cudaErrorNotSupported;
#endif // __cuda_lib_has_cudart
}
// blocks until the event is complete; throws on error
__host__ __device__
void wait() const
{
// XXX should probably check for valid() here
detail::throw_on_error(wait_and_return_cuda_error(), "wait_and_return_cuda_error in cuda::detail::event::wait");
}
// exchanges both the stream and the native event handle with other
__host__ __device__
void swap(event& other)
{
stream().swap(other.stream());
cudaEvent_t tmp = e_;
e_ = other.e_;
other.e_ = tmp;
}
// XXX eliminate this
template<class Function, class... Args>
__host__ __device__
static auto then_kernel() ->
decltype(&cuda_kernel<Function,Args...>)
{
return &cuda_kernel<Function,Args...>;
}
// XXX eliminate this
template<class Function, class... Args>
__host__ __device__
static auto then_kernel(const Function&, const Args&...) ->
decltype(then_kernel<Function,Args...>())
{
return then_kernel<Function,Args...>();
}
// XXX eliminate this -- it's redundant with then_launch_kernel_and_leave_event_valid()
// this form of then() leaves this event in a valid state afterwards
template<class Function, class... Args>
__host__ __device__
event then(Function f, dim3 grid_dim, dim3 block_dim, int shared_memory_size, const Args&... args)
{
return then_on(f, grid_dim, block_dim, shared_memory_size, stream().device(), args...);
}
// XXX eliminate this -- it's redundant with then_launch_kernel()
// this form of then() leaves this event in an invalid state afterwards
template<class Function, class... Args>
__host__ __device__
event then_and_invalidate(Function f, dim3 grid_dim, dim3 block_dim, int shared_memory_size, const Args&... args)
{
// make the stream wait on this event before further launches
stream_wait_and_invalidate();
// get the address of the kernel
auto kernel = then_kernel(f,args...);
// launch the kernel on this event's stream
detail::try_launch_kernel(kernel, grid_dim, block_dim, shared_memory_size, stream().native_handle(), f, args...);
// return a new event
return event(std::move(stream()));
}
// XXX eliminate this
template<class Function, class... Args>
__host__ __device__
static auto then_on_kernel() ->
decltype(&cuda_kernel<Function,Args...>)
{
return &cuda_kernel<Function,Args...>;
}
// XXX eliminate this
template<class Function, class... Args>
__host__ __device__
static auto then_on_kernel(const Function&, const Args&...) ->
decltype(then_on_kernel<Function,Args...>())
{
return then_on_kernel<Function,Args...>();
}
// this function returns a new stream on the given device which depends on this event
__host__ __device__
inline detail::stream make_dependent_stream(const device_id& device) const
{
// create a new stream
detail::stream result(device);
// make the new stream wait on this event
stream_wait(result, *this);
return result;
}
// this function returns a new stream on the device associated with this event which depends on this event
__host__ __device__
inline detail::stream make_dependent_stream() const
{
return make_dependent_stream(stream().device());
}
// Returns: std::move(stream()) if device is the device associated with this event
// otherwise, it returns the result of make_dependent_stream(device)
// Post-condition: !valid()
__host__ __device__
detail::stream make_dependent_stream_and_invalidate(const device_id& device)
{
detail::stream result = (device == stream().device()) ? std::move(stream()) : make_dependent_stream(device);
// invalidate this event
*this = event();
return result;
}
// XXX eliminate this -- it's redundant with then_launch_kernel
// this form of then_on() leaves this event in an invalid state afterwards
template<class Function, class... Args>
__host__ __device__
event then_on_and_invalidate(Function f, dim3 grid_dim, dim3 block_dim, int shared_memory_size, const device_id& device, const Args&... args)
{
// make a stream for the continuation and invalidate this event
detail::stream new_stream = make_dependent_stream_and_invalidate(device);
// get the address of the kernel
auto kernel = then_on_kernel(f,args...);
// launch the kernel on the new stream
detail::try_launch_kernel_on_device(kernel, grid_dim, block_dim, shared_memory_size, new_stream.native_handle(), device.native_handle(), f, args...);
// return a new event
return event(std::move(new_stream));
}
// XXX eliminate this -- it's redundant with then_launch_kernel_and_leave_event_valid
// this form of then_on() leaves this event in a valid state afterwards
template<class Function, class... Args>
__host__ __device__
event then_on(Function f, dim3 grid_dim, dim3 block_dim, int shared_memory_size, const device_id& device, const Args&... args)
{
// create a stream for the kernel on the given device
detail::stream new_stream = make_dependent_stream(device);
// get the address of the kernel
auto kernel = then_on_kernel(f,args...);
// launch the kernel on the new stream
detail::try_launch_kernel_on_device(kernel, grid_dim, block_dim, shared_memory_size, new_stream.native_handle(), device.native_handle(), f, args...);
// return a new event
return event(std::move(new_stream));
}
// this form of then() leaves this event in a valid state afterwards
// XXX might want to see if we can receive f by forwarding reference
template<class Function>
__host__ __device__
event then(Function f)
{
#ifndef __CUDA_ARCH__
// if on host, use a stream callback
// if on device, use then_on()
// make a new stream dependent on this event for the callback on this event's device
detail::stream new_stream = make_dependent_stream();
// launch f on the new stream
new_stream.add_callback(f);
// return a new event
return event(std::move(new_stream));
#else
detail::terminate_with_message("cuda::detail::event::then(): unimplemented function called.");
return event();
// launch a single-thread kernel
//return then_on([=](uint3, uint3){ f(); }, dim3{1}, dim3{1}, 0, stream().device());
#endif
}
// the raw CUDA event handle (0 when invalid)
__host__ __device__
cudaEvent_t native_handle() const
{
return e_;
}
// the stream this event is associated with
__host__ __device__
const stream& stream() const
{
return stream_;
}
private:
detail::stream stream_;
cudaEvent_t e_;
// this function returns 0 so that we can pass it as an argument to swallow(...)
__host__ __device__
int destroy_event()
{
#if __cuda_lib_has_cudart
// since this will likely be called from destructors, swallow errors
cudaError_t error = cudaEventDestroy(e_);
e_ = 0;
detail::print_error_message_if(error, "CUDA error after cudaEventDestroy in cuda::detail::event::destroy_event");
#endif // __cuda_lib_has_cudart
return 0;
}
__host__ __device__
detail::stream& stream()
{
return stream_;
}
// makes the given stream wait on the given event
__host__ __device__
static void stream_wait(detail::stream& s, const detail::event& e)
{
s.wait_on(e.native_handle());
}
// makes the given stream wait on this event
// returns 0 so the result can be passed to swallow(...)
__host__ __device__
int stream_wait(detail::stream& s) const
{
stream_wait(s, *this);
return 0;
}
// makes the given stream wait on this event and invalidates this event
// this function returns 0 so that we can pass it as an argument to swallow(...)
__host__ __device__
int stream_wait_and_invalidate(detail::stream& s)
{
stream_wait(s);
// this operation invalidates this event
destroy_event();
return 0;
}
// makes this event's stream wait on this event and invalidates this event
__host__ __device__
int stream_wait_and_invalidate()
{
return stream_wait_and_invalidate(stream());
}
// variadic no-op used to expand parameter packs for their side effects
template<class... Args>
inline __host__ __device__
static void swallow(Args&&...) {}
// friends need access to stream_wait_and_invalidate()
template<class... Events>
__host__ __device__
friend event when_all_events_are_ready(const device_id& device, Events&... events);
template<class... Events>
__host__ __device__
friend event when_all_events_are_ready(Events&... events);
};
// creates a valid event with no recorded work, i.e. one that is already complete
inline __host__ __device__
event make_ready_event()
{
return event(event::construct_ready);
}
// returns an event on device which becomes ready once all work
// currently enqueued on the raw stream s has completed
inline __host__ __device__
event when_all_events_are_ready(const device_id& device, cudaStream_t s)
{
detail::stream new_stream{device};
// tell the new stream to wait on s
new_stream.wait_on(s);
// return a new event recorded on the new stream
return event(std::move(new_stream));
}
// returns an event which completes once all the given events are complete;
// the given events are invalidated by this operation
template<class... Events>
__host__ __device__
event when_all_events_are_ready(const device_id& device, Events&... events)
{
detail::stream s{device};
// tell the stream to wait on all the events
// (swallow() forces expansion of the parameter pack)
event::swallow(events.stream_wait_and_invalidate(s)...);
// return a new event recorded on the stream
return event(std::move(s));
}
// as above, but without an explicit device
template<class... Events>
__host__ __device__
event when_all_events_are_ready(Events&... events)
{
// just use the current device
// XXX we might prefer the device associated with the first event
return agency::cuda::detail::when_all_events_are_ready(current_device(), events...);
}
// a blocking_event is an event whose destructor calls .wait() when the blocking_event is valid
// a blocking_event is an event whose destructor calls .wait() when the blocking_event is valid
// i.e., destroying a valid blocking_event synchronizes with its completion
class blocking_event : public event
{
public:
// move construction transfers ownership; only one blocking_event waits
inline __host__ __device__
blocking_event(blocking_event&& other)
: event(std::move(other))
{}
// adopts an ordinary event, making its completion block at destruction time
inline __host__ __device__
blocking_event(event&& other)
: event(std::move(other))
{}
inline __host__ __device__
~blocking_event()
{
if(valid())
{
// since we're in a destructor, let's avoid
// propagating exceptions out of a destructor
detail::print_error_message_if(wait_and_return_cuda_error(), "wait_and_return_cuda_error in cuda::detail::blocking_event() dtor");
}
}
};
} // end detail
} // end cuda
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/type_traits.hpp>
#include <agency/execution/executor/executor_traits/is_simple_executor.hpp>
#include <agency/execution/executor/executor_traits/is_bulk_executor.hpp>
namespace agency
{
// trait: T is an executor if it is either a simple executor or a bulk executor
template<class T>
using is_executor = agency::detail::disjunction<
is_simple_executor<T>,
is_bulk_executor<T>
>;
namespace detail
{
// a fake Concept to use with __AGENCY_REQUIRES
template<class T>
constexpr bool Executor()
{
return is_executor<T>();
}
} // end detail
} // end agency
<file_sep>#include <iostream>
#include <type_traits>
#include <vector>
#include <cassert>
#include <agency/execution/executor/sequenced_executor.hpp>
#include <agency/execution/executor/executor_traits.hpp>
// compile-time and runtime smoke test for agency::sequenced_executor:
// checks the executor's traits and exercises bulk_sync_execute() once
int main()
{
using namespace agency;
static_assert(is_bulk_synchronous_executor<sequenced_executor>::value,
"sequenced_executor should be a bulk synchronous executor");
static_assert(is_bulk_executor<sequenced_executor>::value,
"sequenced_executor should be a bulk executor");
static_assert(detail::is_detected_exact<sequenced_execution_tag, executor_execution_category_t, sequenced_executor>::value,
"sequenced_executor should have sequenced_execution_tag execution_category");
static_assert(detail::is_detected_exact<size_t, executor_shape_t, sequenced_executor>::value,
"sequenced_executor should have size_t shape_type");
static_assert(detail::is_detected_exact<size_t, executor_index_t, sequenced_executor>::value,
"sequenced_executor should have size_t index_type");
// fixed typo in the diagnostic message ("furture" -> "future")
static_assert(detail::is_detected_exact<std::future<int>, executor_future_t, sequenced_executor, int>::value,
"sequenced_executor should have std::future future");
static_assert(executor_execution_depth<sequenced_executor>::value == 1,
"sequenced_executor should have execution_depth == 1");
sequenced_executor exec;
size_t shape = 10;
// each agent copies one element of the shared argument into the result container
auto result = exec.bulk_sync_execute(
[](size_t idx, std::vector<int>& results, std::vector<int>& shared_arg)
{
results[idx] = shared_arg[idx];
},
shape,
[=]{ return std::vector<int>(shape); }, // results
[=]{ return std::vector<int>(shape, 13); } // shared_arg
);
assert(std::vector<int>(10, 13) == result);
std::cout << "OK" << std::endl;
return 0;
}
<file_sep>#include <iostream>
#include <algorithm>
#include <cassert>
#include <numeric>
#include <vector>
#include <agency/container/vector.hpp>
// exercises agency::vector::erase(first, last) on an empty range, the whole
// vector, and a strict middle slice
void test_range_erase()
{
using namespace agency;
{
// test erase empty vector
vector<int> v;
auto iterator = v.erase(v.begin(), v.end());
assert(iterator == v.end());
assert(iterator == v.begin());
assert(v.empty());
}
{
// test erase entire non-empty vector
vector<int> v(10);
auto iterator = v.erase(v.begin(), v.end());
assert(iterator == v.end());
assert(iterator == v.begin());
assert(v.empty());
}
{
// test erase middle of non-empty vector
vector<int> v(10);
std::iota(v.begin(), v.end(), 0);
size_t num_elements_before_erase = v.size() / 2;
// keep a tail strictly shorter than "everything after the erase point":
// computing this as v.size() - num_elements_before_erase (as before) made
// [erase_first, erase_last) empty, so no elements were ever erased here
size_t num_elements_after_erase = (v.size() - num_elements_before_erase) / 2;
auto erase_first = v.begin() + num_elements_before_erase;
auto erase_last = v.end() - num_elements_after_erase;
size_t num_elements_erased = erase_last - erase_first;
auto result = v.erase(erase_first, erase_last);
// erase returns an iterator to the first element following the erased range
assert(result == v.begin() + num_elements_before_erase);
assert(v.size() == num_elements_before_erase + num_elements_after_erase);
// the leading elements are untouched
std::vector<int> elements_before_erase(num_elements_before_erase);
std::iota(elements_before_erase.begin(), elements_before_erase.end(), 0);
assert(std::equal(elements_before_erase.begin(), elements_before_erase.end(), v.begin()));
// the trailing elements shifted down to follow the leading elements
std::vector<int> elements_after_erase(num_elements_after_erase);
std::iota(elements_after_erase.begin(), elements_after_erase.end(), num_elements_before_erase + num_elements_erased);
assert(std::equal(elements_after_erase.begin(), elements_after_erase.end(), result));
}
}
// same as test_range_erase() above, but drives the execution-policy overload
// of agency::vector::erase
template<class ExecutionPolicy>
void test_range_erase(ExecutionPolicy policy)
{
using namespace agency;
{
// test erase empty vector
vector<int> v;
auto iterator = v.erase(policy, v.begin(), v.end());
assert(iterator == v.end());
assert(iterator == v.begin());
assert(v.empty());
}
{
// test erase entire non-empty vector
vector<int> v(10);
auto iterator = v.erase(policy, v.begin(), v.end());
assert(iterator == v.end());
assert(iterator == v.begin());
assert(v.empty());
}
{
// test erase middle of non-empty vector
vector<int> v(10);
std::iota(v.begin(), v.end(), 0);
size_t num_elements_before_erase = v.size() / 2;
// keep a tail strictly shorter than "everything after the erase point":
// computing this as v.size() - num_elements_before_erase (as before) made
// [erase_first, erase_last) empty, so no elements were ever erased here
size_t num_elements_after_erase = (v.size() - num_elements_before_erase) / 2;
auto erase_first = v.begin() + num_elements_before_erase;
auto erase_last = v.end() - num_elements_after_erase;
size_t num_elements_erased = erase_last - erase_first;
auto result = v.erase(policy, erase_first, erase_last);
// erase returns an iterator to the first element following the erased range
assert(result == v.begin() + num_elements_before_erase);
assert(v.size() == num_elements_before_erase + num_elements_after_erase);
// the leading elements are untouched
std::vector<int> elements_before_erase(num_elements_before_erase);
std::iota(elements_before_erase.begin(), elements_before_erase.end(), 0);
assert(std::equal(elements_before_erase.begin(), elements_before_erase.end(), v.begin()));
// the trailing elements shifted down to follow the leading elements
std::vector<int> elements_after_erase(num_elements_after_erase);
std::iota(elements_after_erase.begin(), elements_after_erase.end(), num_elements_before_erase + num_elements_erased);
assert(std::equal(elements_after_erase.begin(), elements_after_erase.end(), result));
}
}
// runs the erase tests without a policy, then with the sequential and
// parallel execution policies
int main()
{
test_range_erase();
test_range_erase(agency::seq);
test_range_erase(agency::par);
std::cout << "OK" << std::endl;
return 0;
}
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/experimental/ranges/range_traits.hpp>
#include <agency/experimental/ranges/range_traits.hpp>
#include <agency/experimental/ranges/all.hpp>
#include <agency/experimental/bounded_integer.hpp>
#include <iterator>
namespace agency
{
namespace experimental
{
// XXX in c++17, the type of bound should be auto
// a view of a Range whose size is known at compile time to never exceed `bound`;
// its size_type is a bounded integer carrying that compile-time upper bound
// XXX in c++17, the type of bound should be auto
template<class Range, std::size_t bound>
class statically_bounded_view
{
private:
// store the underlying range as a view (all() does not copy elements)
using base_type = agency::experimental::all_t<Range>;
public:
using value_type = range_value_t<base_type>;
using reference = range_reference_t<base_type>;
using difference_type = range_difference_t<base_type>;
using iterator = range_iterator_t<base_type>;
using sentinel = range_sentinel_t<base_type>;
// note the special size_type
using size_type = bounded_size_t<bound>;
__AGENCY_ANNOTATION
statically_bounded_view() = default;
__AGENCY_ANNOTATION
statically_bounded_view(const statically_bounded_view&) = default;
// converting constructor: adopts any range viewable as base_type
template<class OtherRange,
__AGENCY_REQUIRES(
std::is_constructible<
base_type,
all_t<OtherRange&&>
>::value
)>
__AGENCY_ANNOTATION
statically_bounded_view(OtherRange&& other)
: base_(all(std::forward<OtherRange>(other)))
{}
// the compile-time upper bound on size()
static constexpr std::size_t static_bound = bound;
__AGENCY_ANNOTATION
static constexpr size_type max_size()
{
return bounded_size_t<bound>(bound);
}
__AGENCY_ANNOTATION
size_type size() const
{
return size_type(base_.size());
}
__AGENCY_ANNOTATION
iterator begin() const
{
return base_.begin();
}
__AGENCY_ANNOTATION
sentinel end() const
{
return base_.end();
}
// random access via the bounded index type
__AGENCY_ANNOTATION
reference operator()(size_type n) const
{
return base_[n.value()];
}
private:
base_type base_;
};
template<class Range, std::size_t bound>
statically_bounded_view<Range,bound> all(const statically_bounded_view<Range,bound>& rng)
{
return rng;
}
// factory: wraps rng in a statically_bounded_view with compile-time bound `bound`
// XXX in c++17, the type of bound should be auto
template<std::size_t bound, class Range>
__AGENCY_ANNOTATION
statically_bounded_view<Range,bound> statically_bounded(Range&& rng)
{
return statically_bounded_view<Range,bound>(std::forward<Range>(rng));
}
} // end experimental
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/execution/executor/customization_points/future_cast.hpp>
#include <agency/execution/executor/customization_points/bulk_async_execute.hpp>
#include <agency/execution/executor/detail/utility/invoke_functors.hpp>
#include <agency/execution/executor/executor_traits.hpp>
#include <agency/detail/factory.hpp>
#include <agency/future.hpp>
namespace agency
{
namespace detail
{
// adapts bulk_async_execute() (which requires a result factory) to the case where
// the caller wants no result: runs f over `shape` agents and returns a void future
__agency_exec_check_disable__
template<class E, class Function, class... Factories,
__AGENCY_REQUIRES(BulkExecutor<E>()),
__AGENCY_REQUIRES(executor_execution_depth<E>::value == sizeof...(Factories))
>
__AGENCY_ANNOTATION
executor_future_t<E,void> bulk_async_execute_with_void_result(E& exec, Function f, executor_shape_t<E> shape, Factories... factories)
{
// wrap f in a functor that will ignore the unit object we pass to it
ignore_unit_result_parameter_and_invoke<Function> g{f};
// just call bulk_async() and use a result factory that creates a unit object which can be easily discarded
executor_future_t<E,unit> intermediate_future = agency::bulk_async_execute(exec, g, shape, unit_factory(), factories...);
// cast the intermediate_future to void
return agency::future_cast<void>(exec, intermediate_future);
}
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/tuple.hpp>
#include <agency/detail/tuple/arithmetic_tuple_facade.hpp>
#include <agency/detail/type_traits.hpp>
#include <agency/detail/make_tuple_if_not_scoped.hpp>
namespace agency
{
namespace detail
{
// shape_tuple can't just be an alias for a particular kind of tuple
// because it also requires arithmetic operators
// shape_tuple can't just be an alias for a particular kind of tuple
// because it also requires arithmetic operators
// (arithmetic_tuple_facade supplies elementwise +,-,*,/ etc. via CRTP)
template<class... Shapes>
class shape_tuple :
public agency::tuple<Shapes...>,
public arithmetic_tuple_facade<shape_tuple<Shapes...>>
{
public:
// inherit all of tuple's constructors
using agency::tuple<Shapes...>::tuple;
};
// computes the shape type of a two-level (scoped) executor: each level's shape
// is wrapped in a tuple only if that level's execution category is itself scoped,
// then the two are concatenated
template<class ExecutionCategory1,
class ExecutionCategory2,
class Shape1,
class Shape2>
struct scoped_shape
{
using type = decltype(
agency::tuple_cat(
detail::make_tuple_if_not_scoped<ExecutionCategory1>(std::declval<Shape1>()),
detail::make_tuple_if_not_scoped<ExecutionCategory2>(std::declval<Shape2>())
)
);
};
// convenience alias for scoped_shape<...>::type
template<class ExecutionCategory1,
class ExecutionCategory2,
class Shape1,
class Shape2>
using scoped_shape_t = typename scoped_shape<
ExecutionCategory1,
ExecutionCategory2,
Shape1,
Shape2
>::type;
// builds a scoped shape value from an outer and an inner shape,
// mirroring the type computation above
template<class ExecutionCategory1,
class ExecutionCategory2,
class Shape1,
class Shape2>
__AGENCY_ANNOTATION
scoped_shape_t<ExecutionCategory1,ExecutionCategory2,Shape1,Shape2> make_scoped_shape(const Shape1& outer_shape, const Shape2& inner_shape)
{
return agency::tuple_cat(
detail::make_tuple_if_not_scoped<ExecutionCategory1>(outer_shape),
detail::make_tuple_if_not_scoped<ExecutionCategory2>(inner_shape)
);
}
} // end detail
} // end agency
namespace std
{
// make shape_tuple participate in the std tuple protocol by forwarding
// tuple_size and tuple_element to the underlying agency::tuple
template<class... Shapes>
class tuple_size<agency::detail::shape_tuple<Shapes...>> : public std::tuple_size<agency::tuple<Shapes...>> {};
template<size_t i, class... Shapes>
class tuple_element<i,agency::detail::shape_tuple<Shapes...>> : public std::tuple_element<i,agency::tuple<Shapes...>> {};
} // end namespace std
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/container/array.hpp>
#include <agency/container/vector.hpp>
#include <agency/cuda/detail/feature_test.hpp>
#include <agency/cuda/detail/terminate.hpp>
#include <vector>
#include <algorithm>
namespace agency
{
namespace cuda
{
class device_id;
namespace detail
{
// forward declarations; definitions appear at the bottom of this header
__host__ __device__
void set_current_device(const device_id& d);
__host__ __device__
device_id current_device();
// the CUDA Runtime's current device becomes the given device
// for as long as this object is in scope
// (RAII guard: the constructor switches devices, the destructor restores)
class scoped_current_device
{
public:
__host__ __device__
scoped_current_device(const device_id& new_device);
__host__ __device__
~scoped_current_device();
private:
// native handle of the device that was current before construction
int old_device;
};
// detection trait: true when T is a range whose elements are device_id
template<class T>
struct is_range_of_device_id_impl
{
// this is hacky but sufficient for our purposes
// SFINAE: this overload participates only if all the defaulted types are well-formed
template<class U,
// get U's iterator type
class Iterator = decltype(std::declval<U>().begin()),
// get U's sentinel type
class Sentinel = decltype(std::declval<U>().end()),
// get iterator's value_type
class ValueType = typename std::iterator_traits<Iterator>::value_type,
// ValueType should be device_id
class Result = std::is_same<ValueType, device_id>
>
static Result test(int);
// fallback when any expression above is ill-formed
template<class>
static std::false_type test(...);
using type = decltype(test<T>(0));
};
template<class T>
using is_range_of_device_id = typename is_range_of_device_id_impl<T>::type;
} // end detail
// a lightweight, ordered, comparable identifier for a CUDA device,
// wrapping the CUDA Runtime's int device ordinal
class device_id
{
public:
typedef int native_handle_type;
// wraps a raw CUDA device ordinal (implicit, so ints convert to device_id)
__host__ __device__
device_id(native_handle_type handle)
: handle_(handle)
{}
// default constructor creates a device_id which represents no device
__host__ __device__
device_id()
: device_id(-1)
{}
// XXX std::this_thread::native_handle() is not const -- why?
__host__ __device__
native_handle_type native_handle() const
{
return handle_;
}
// blocks the calling thread until all work on this device has completed
void wait() const
{
detail::scoped_current_device temporary_device(*this);
#if __cuda_lib_has_cudart
detail::throw_on_error(cudaDeviceSynchronize(), "cuda::device_id::wait(): cudaDeviceSynchronize()");
#endif
}
// comparisons order device_ids by their native handle
// NOTE(review): operator== takes (device_id, const device_id&) while the others
// take both by value -- presumably just an inconsistency; confirm no intent
__host__ __device__
friend inline bool operator==(device_id lhs, const device_id& rhs)
{
return lhs.handle_ == rhs.handle_;
}
__host__ __device__
friend inline bool operator!=(device_id lhs, device_id rhs)
{
return lhs.handle_ != rhs.handle_;
}
__host__ __device__
friend inline bool operator<(device_id lhs, device_id rhs)
{
return lhs.handle_ < rhs.handle_;
}
__host__ __device__
friend inline bool operator<=(device_id lhs, device_id rhs)
{
return lhs.handle_ <= rhs.handle_;
}
__host__ __device__
friend inline bool operator>(device_id lhs, device_id rhs)
{
return lhs.handle_ > rhs.handle_;
}
__host__ __device__
friend inline bool operator>=(device_id lhs, device_id rhs)
{
return lhs.handle_ >= rhs.handle_;
}
// prints the native handle
// NOTE(review): std::ostream is used but no <ostream>/<iosfwd> include is visible
// in this header -- presumably provided transitively; confirm
friend std::ostream& operator<<(std::ostream &os, const device_id& id)
{
return os << id.native_handle();
}
private:
native_handle_type handle_;
};
// device() is included for symmetry with devices()
// note that when an integer is passed as a parameter to device(d),
// it will be automatically converted into a device_id
// device() is included for symmetry with devices()
// note that when an integer is passed as a parameter to device(d),
// it will be automatically converted into a device_id
__AGENCY_ANNOTATION
device_id device(device_id d)
{
return d;
}
// builds a fixed-size array of device_ids from one or more ints/device_ids
template<class... IntegersOrDeviceIds>
__AGENCY_ANNOTATION
array<device_id, 1 + sizeof...(IntegersOrDeviceIds)> devices(device_id id0, IntegersOrDeviceIds... ids)
{
return {{id0, device_id(ids)...}};
}
// converts a range of integers (or device_ids) into a vector of device_ids;
// disabled when the argument is itself convertible to device_id so the
// single-id overload above is preferred
template<class Range,
__AGENCY_REQUIRES(
!std::is_convertible<const Range&, device_id>::value
)>
__AGENCY_ANNOTATION
vector<device_id> devices(const Range& integers_or_device_ids)
{
const size_t num_ids = integers_or_device_ids.size();
vector<device_id> result(num_ids);
size_t idx = 0;
while(idx < num_ids)
{
result[idx] = device_id(integers_or_device_ids[idx]);
++idx;
}
return result;
}
// enumerates every CUDA device visible to the runtime;
// returns an empty vector when CUDART is unavailable
__AGENCY_ANNOTATION
vector<device_id> all_devices()
{
vector<device_id> result;
#if __cuda_lib_has_cudart
int device_count = 0;
detail::throw_on_error(cudaGetDeviceCount(&device_count), "cuda::all_devices(): cudaGetDeviceCount()");
// reserve up front to avoid reallocation in the loop below
result.reserve(static_cast<size_t>(device_count));
for(int i = 0; i < device_count; ++i)
{
result.push_back(device_id(i));
}
#endif
return result;
}
namespace detail
{
// makes d the CUDA Runtime's current device;
// in __device__ code the current device cannot change, so requesting a
// different device is reported as an error
__host__ __device__
void set_current_device(const device_id& d)
{
#if __cuda_lib_has_cudart
#ifndef __CUDA_ARCH__
throw_on_error(cudaSetDevice(d.native_handle()), "cuda::detail::set_current_device(): cudaSetDevice()");
#else
if(d != current_device())
{
detail::throw_on_error(cudaErrorNotSupported, "cuda::detail::set_current_device(): Unable to set a different device in __device__ code.");
}
#endif // __CUDA_ARCH__
#endif // __cuda_lib_has_cudart
}
// returns the CUDA Runtime's current device, or device_id(-1) without CUDART
__host__ __device__
device_id current_device()
{
int result = -1;
#if __cuda_lib_has_cudart
throw_on_error(cudaGetDevice(&result), "cuda::detail::current_device(): cudaGetDevice()");
#endif
return device_id(result);
}
// remember the previous device, then switch to new_device
__host__ __device__
scoped_current_device::scoped_current_device(const device_id& new_device)
: old_device(detail::current_device().native_handle())
{
detail::set_current_device(new_device);
}
// restore the device that was current at construction time
__host__ __device__
scoped_current_device::~scoped_current_device()
{
detail::set_current_device(device_id(old_device));
}
// blocks the calling thread until every device in the container is idle
template<class Container>
void wait(const Container& devices)
{
for(auto iter = devices.begin(); iter != devices.end(); ++iter)
{
iter->wait();
}
}
// queries whether the device supports concurrent access to managed memory
// while kernels are running; false when CUDART is unavailable
bool has_concurrent_managed_access(const device_id& device)
{
int result = 0;
#if __cuda_lib_has_cudart
throw_on_error(cudaDeviceGetAttribute(&result, cudaDevAttrConcurrentManagedAccess, device.native_handle()), "cuda::detail::has_concurrent_managed_access(): cudaDeviceGetAttribute()");
#endif
return result;
}
// synchronizes with all the given devices unless every one of them supports
// concurrent managed access (in which case no synchronization is needed)
template<class Container>
void wait_if_any_lack_concurrent_managed_access(const Container& devices)
{
// if not all of the devices have concurrent managed access...
if(!std::all_of(devices.begin(), devices.end(), has_concurrent_managed_access))
{
// then wait for all of the devices to become idle
wait(devices);
}
}
// queries the number of streaming multiprocessors on device d;
// without CUDART the query is reported as an error and 0 is returned
__host__ __device__
size_t number_of_multiprocessors(const device_id& d)
{
#if __cuda_lib_has_cudart
int attr = 0;
throw_on_error(cudaDeviceGetAttribute(&attr, cudaDevAttrMultiProcessorCount, d.native_handle()), "cuda::detail::number_of_multiprocessors(): cudaDeviceGetAttribute()");
return static_cast<size_t>(attr);
#else
throw_on_error(cudaErrorNotSupported, "cuda::detail::number_of_multiprocessors(): cudaDeviceGetAttribute() requires CUDART");
return 0;
#endif
}
// queries the maximum x-dimension of a grid on device d
__host__ __device__
size_t maximum_grid_size_x(const device_id& d)
{
#if __cuda_lib_has_cudart
int attr = 0;
throw_on_error(cudaDeviceGetAttribute(&attr, cudaDevAttrMaxGridDimX, d.native_handle()), "cuda::detail::maximum_grid_size_x(): cudaDeviceGetAttribute()");
return static_cast<size_t>(attr);
#else
throw_on_error(cudaErrorNotSupported, "cuda::detail::maximum_grid_size_x(): cudaDeviceGetAttribute() requires CUDART");
return 0;
#endif
}
// queries the maximum x-dimension of a thread block on device d
__host__ __device__
size_t maximum_block_size_x(const device_id& d)
{
#if __cuda_lib_has_cudart
int attr = 0;
throw_on_error(cudaDeviceGetAttribute(&attr, cudaDevAttrMaxBlockDimX, d.native_handle()), "cuda::detail::maximum_block_size_x(): cudaDeviceGetAttribute()");
return static_cast<size_t>(attr);
#else
throw_on_error(cudaErrorNotSupported, "cuda::detail::maximum_block_size_x(): cudaDeviceGetAttribute() requires CUDART");
return 0;
#endif
}
// forces lazy creation of the CUDA context on device d;
// cudaFree(0) is the conventional no-op call that triggers context initialization
void ensure_context_is_initialized(const device_id& d)
{
detail::scoped_current_device scope(d);
cudaFree(0);
}
// initializes the context on each device in the container
template<class Container>
void ensure_contexts_are_initialized(const Container& devices)
{
for(auto& d : devices)
{
ensure_context_is_initialized(d);
}
}
// initializes the context on every device visible to the runtime
void ensure_all_contexts_are_initialized()
{
ensure_contexts_are_initialized(cuda::all_devices());
}
} // end detail
} // end cuda
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/container/array.hpp>
#include <agency/experimental/ranges/range_traits.hpp>
#include <cstddef>
namespace agency
{
namespace experimental
{
constexpr std::ptrdiff_t dynamic_extent = -1;
namespace detail
{
// storage policy for span's size: when the extent is known at compile time,
// no per-object storage is needed -- size() just returns the template argument
template<std::ptrdiff_t Extent>
class span_base
{
public:
// the runtime size argument is ignored; the extent is fixed
__AGENCY_ANNOTATION
span_base(std::ptrdiff_t)
{
}
__AGENCY_ANNOTATION
std::ptrdiff_t size() const
{
return Extent;
}
};
// when the extent is dynamic, the size is stored in the object
template<>
class span_base<dynamic_extent>
{
public:
__AGENCY_ANNOTATION
span_base(std::ptrdiff_t size)
: size_(size)
{
}
__AGENCY_ANNOTATION
std::ptrdiff_t size() const
{
return size_;
}
private:
std::ptrdiff_t size_;
};
} // end detail
// see http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2016/p0122r1.pdf
// a non-owning view over a contiguous sequence of ElementType,
// modeled after P0122's span; Extent is the compile-time size, or
// dynamic_extent when the size is only known at runtime
template<class ElementType, std::ptrdiff_t Extent = dynamic_extent>
class span : private detail::span_base<Extent>
{
private:
using super_t = detail::span_base<Extent>;
public:
using element_type = ElementType;
using index_type = std::ptrdiff_t;
using pointer = element_type*;
using reference = element_type&;
using iterator = pointer;
constexpr static index_type extent = Extent;
// an empty span
__AGENCY_ANNOTATION
span() : span(nullptr) {}
__AGENCY_ANNOTATION
explicit span(std::nullptr_t) : span(nullptr, index_type{0}) {}
// views count elements beginning at ptr; a null ptr yields an empty span
__AGENCY_ANNOTATION
span(pointer ptr, index_type count)
: super_t(ptr ? count : 0),
data_(ptr)
{}
__AGENCY_ANNOTATION
span(pointer first, pointer last) : span(first, last - first) {}
// views a built-in array
template<size_t N>
__AGENCY_ANNOTATION
span(element_type (&arr)[N]) : span(arr, N) {}
// views an agency::array
// NOTE(review): these delegate to span(pointer, index_type), which assumes
// agency::array converts implicitly to a pointer -- confirm
template<size_t N>
__AGENCY_ANNOTATION
span(array<typename std::remove_const<element_type>::type,N>& arr) : span(arr, N) {}
template<size_t N>
__AGENCY_ANNOTATION
span(const array<typename std::remove_const<element_type>::type,N>& arr) : span(arr, N) {}
// views any container whose begin()/end() yield pointers convertible to pointer
// XXX should require iterator contiguity, but that requires contiguous_iterator_tag
__agency_exec_check_disable__
template<class Container,
class BeginPointer = decltype(&*std::declval<Container>().begin()),
class EndPointer = decltype(&*std::declval<Container>().end()),
class = typename std::enable_if<
std::is_convertible<BeginPointer,pointer>::value &&
std::is_convertible<EndPointer, pointer>::value
>::type
>
__AGENCY_ANNOTATION
span(Container&& c)
: span(&*c.begin(), &*c.end())
{}
__AGENCY_ANNOTATION
index_type size() const
{
return super_t::size();
}
__AGENCY_ANNOTATION
pointer data()
{
return data_;
}
__AGENCY_ANNOTATION
pointer data() const
{
return data_;
}
__AGENCY_ANNOTATION
iterator begin() const
{
return data();
}
__AGENCY_ANNOTATION
iterator end() const
{
return begin() + size();
}
__AGENCY_ANNOTATION
reference operator[](index_type idx) const
{
return begin()[idx];
}
// returns a view of the subrange [offset, offset + count);
// when count == dynamic_extent, the subspan extends to the end of this span.
// (previously count was forwarded unmodified, so the default argument produced
// a span whose size() was dynamic_extent, i.e. -1 -- P0122 specifies size() - offset)
__AGENCY_ANNOTATION
span<element_type, dynamic_extent> subspan(index_type offset, index_type count = dynamic_extent) const
{
return span<element_type, dynamic_extent>(data() + offset, count == dynamic_extent ? size() - offset : count);
}
private:
pointer data_;
};
// two spans compare equal when their sizes match and their contents are
// elementwise equal
template<class T, std::ptrdiff_t Extent>
__AGENCY_ANNOTATION
bool operator==(const span<T,Extent>& lhs, const span<T,Extent>& rhs)
{
if(lhs.size() != rhs.size())
{
return false;
}
std::ptrdiff_t idx = 0;
while(idx < lhs.size())
{
if(lhs[idx] != rhs[idx])
{
return false;
}
++idx;
}
return true;
}
// specialize range_cardinality for span<T,Extent>
// primary template declared elsewhere; redeclared here so the
// specializations below are well-formed
template<class Range>
struct range_cardinality;
// a dynamically-sized span has finite (but statically unknown) cardinality
template<class T>
struct range_cardinality<span<T>> : std::integral_constant<cardinality, finite> {};
// a statically-sized span's cardinality is its extent
template<class T, std::ptrdiff_t Extent>
struct range_cardinality<span<T,Extent>> : std::integral_constant<cardinality, static_cast<cardinality>(Extent)> {};
} // end experimental
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <utility>
#include <memory>
namespace agency
{
namespace detail
{
// allocator_adaptor adapts a memory resource that allocates bytes into an allocator that
// allocates objects
// allocator_adaptor adapts a memory resource that allocates bytes into an allocator that
// allocates objects
// (private inheritance enables the empty-base optimization for stateless resources)
template<class T, class MemoryResource>
class allocator_adaptor : private MemoryResource
{
private:
using super_t = MemoryResource;
public:
using value_type = T;
__agency_exec_check_disable__
__AGENCY_ANNOTATION
allocator_adaptor() = default;
__agency_exec_check_disable__
__AGENCY_ANNOTATION
allocator_adaptor(const allocator_adaptor&) = default;
// rebinding constructor: adopts the resource of an adaptor for another type
__agency_exec_check_disable__
template<class U>
__AGENCY_ANNOTATION
allocator_adaptor(const allocator_adaptor<U,MemoryResource>& other)
: super_t(other.resource())
{}
__agency_exec_check_disable__
__AGENCY_ANNOTATION
allocator_adaptor(const MemoryResource& resource)
: super_t(resource)
{}
// allocates storage for n objects of type T (disabled for void)
__agency_exec_check_disable__
template<class U = T, __AGENCY_REQUIRES(!std::is_void<U>::value)>
__AGENCY_ANNOTATION
value_type *allocate(size_t n)
{
return reinterpret_cast<value_type*>(super_t::allocate(n * sizeof(T)));
}
// deallocates storage previously obtained from allocate(n)
__agency_exec_check_disable__
template<class U = T, __AGENCY_REQUIRES(!std::is_void<U>::value)>
__AGENCY_ANNOTATION
void deallocate(value_type* ptr, size_t n)
{
super_t::deallocate(ptr, n * sizeof(T));
}
__AGENCY_ANNOTATION
const MemoryResource& resource() const
{
return *this;
}
// allocators compare equal when their underlying resources do
__agency_exec_check_disable__
__AGENCY_ANNOTATION
bool operator==(const allocator_adaptor& other) const
{
return static_cast<const MemoryResource&>(*this) == static_cast<const MemoryResource&>(other);
}
__agency_exec_check_disable__
__AGENCY_ANNOTATION
bool operator!=(const allocator_adaptor& other) const
{
return static_cast<const MemoryResource&>(*this) != static_cast<const MemoryResource&>(other);
}
};
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/type_traits.hpp>
#include <agency/detail/integer_sequence.hpp>
#include <agency/execution/executor/executor_traits/detail/executor_execution_depth_or.hpp>
#include <agency/execution/executor/executor_traits/detail/member_shape_type_or.hpp>
#include <agency/execution/executor/executor_traits/detail/member_future_or.hpp>
#include <agency/execution/executor/executor_traits/detail/member_index_type_or.hpp>
#include <future>
#include <type_traits>
#include <utility>
namespace agency
{
namespace detail
{
// detection trait: true when Executor has a .bulk_then_execute() member callable
// with the given argument types AND returning the expected future type
template<class Executor, class Function, class Shape,
class Future,
class ResultFactory,
class... SharedFactories
>
struct has_bulk_then_execute_impl
{
using result_type = result_of_t<ResultFactory()>;
// the future type the executor is expected to return (std::future by default)
using expected_future_type = member_future_or_t<Executor,result_type,std::future>;
// SFINAE: well-formed only if the call expression compiles and returns the expected type
template<class Executor1,
class ReturnType = decltype(
std::declval<Executor1>().bulk_then_execute(
std::declval<Function>(),
std::declval<Shape>(),
std::declval<Future&>(),
std::declval<ResultFactory>(),
std::declval<SharedFactories>()...
)
),
class = typename std::enable_if<
std::is_same<ReturnType,expected_future_type>::value
>::type>
static std::true_type test(int);
template<class>
static std::false_type test(...);
using type = decltype(test<Executor>(0));
};
template<class Executor, class Function, class Shape,
class Future,
class ResultFactory,
class... SharedFactories
>
using has_bulk_then_execute = typename has_bulk_then_execute_impl<Executor, Function, Shape, Future, ResultFactory, SharedFactories...>::type;
// builds the test-call argument types for T's execution depth and checks
// whether T::bulk_then_execute() accepts them
template<class T, class IndexSequence>
struct is_bulk_continuation_executor_impl;
template<class T, size_t... Indices>
struct is_bulk_continuation_executor_impl<T, index_sequence<Indices...>>
{
// executor properties
using shape_type = member_shape_type_or_t<T,size_t>;
using index_type = member_index_type_or_t<T,shape_type>;
// types related to functions passed to .bulk_then_execute()
using result_type = int;
using predecessor_type = int;
using predecessor_future_type = member_future_or_t<T,predecessor_type,std::future>;
// one shared parameter per execution-hierarchy level
template<size_t>
using shared_type = int;
// the functions we'll pass to .bulk_then_execute() to test
// XXX WAR nvcc 8.0 bug
//using test_function = std::function<void(index_type, predecessor_type&, result_type&, shared_type<Indices>&...)>;
//using test_result_factory = std::function<result_type()>;
struct test_function
{
void operator()(index_type, predecessor_type&, result_type&, shared_type<Indices>&...);
};
struct test_result_factory
{
result_type operator()();
};
// XXX WAR nvcc 8.0 bug
//template<size_t I>
//using test_shared_factory = std::function<shared_type<I>()>;
template<size_t I>
struct test_shared_factory
{
shared_type<I> operator()();
};
using type = has_bulk_then_execute<
T,
test_function,
shape_type,
predecessor_future_type,
test_result_factory,
test_shared_factory<Indices>...
>;
};
} // end detail
// trait: T is a bulk continuation executor if it provides a conforming
// .bulk_then_execute() for its full execution depth
template<class T>
using is_bulk_continuation_executor = typename detail::is_bulk_continuation_executor_impl<
T,
detail::make_index_sequence<
detail::executor_execution_depth_or<T>::value
>
>::type;
namespace detail
{
// a fake Concept to use with __AGENCY_REQUIRES
template<class T>
constexpr bool BulkContinuationExecutor()
{
return is_bulk_continuation_executor<T>();
}
} // end detail
} // end agency
<file_sep>/// \file
/// \brief Include this file to use bulk_invoke().
///
#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/control_structures/bulk_invoke_execution_policy.hpp>
#include <agency/detail/type_traits.hpp>
#include <agency/detail/integer_sequence.hpp>
#include <agency/detail/control_structures/is_bulk_call_possible_via_execution_policy.hpp>
#include <agency/execution/execution_agent.hpp>
namespace agency
{
namespace detail
{
// SFINAE helper: exposes bulk_invoke's result type only when the call is possible
// primary template: no nested `type`, so bulk_invoke is removed from overload resolution
template<bool enable, class ExecutionPolicy, class Function, class... Args>
struct enable_if_bulk_invoke_execution_policy_impl {};
// enabled case: forwards the computed result type
template<class ExecutionPolicy, class Function, class... Args>
struct enable_if_bulk_invoke_execution_policy_impl<true, ExecutionPolicy, Function, Args...>
{
using type = bulk_invoke_execution_policy_result_t<ExecutionPolicy,Function,Args...>;
};
// front end: decays the policy and tests whether the bulk call is well-formed
template<class ExecutionPolicy, class Function, class... Args>
struct enable_if_bulk_invoke_execution_policy
: enable_if_bulk_invoke_execution_policy_impl<
is_bulk_call_possible_via_execution_policy<decay_t<ExecutionPolicy>,Function,Args...>::value,
decay_t<ExecutionPolicy>,
Function,
Args...
>
{};
} // end detail
///
/// \defgroup control_structures Control Structures
/// \brief Control structures create execution.
///
///
/// The primary way Agency programs create execution is by invoking a
/// **control structure**. Control structures are functions invoked via
/// composition with an **execution policy**. Execution policies
/// parameterize control structures by describing the properties of the
/// requested execution.
///
/// For example, the following code snippet uses the bulk_invoke() control
/// structure with the \ref par execution policy to require the parallel execution
/// of ten invocations of a lambda function:
///
/// ~~~~{.cpp}
/// using namespace agency;
///
/// bulk_invoke(par(10), [](parallel_agent& self)
/// {
/// // task body here
/// ...
/// });
/// ~~~~
/// \brief Creates a bulk synchronous invocation.
/// \ingroup control_structures
///
///
/// `bulk_invoke` is a control structure which creates a group of function invocations with forward progress ordering as required by an execution policy.
/// The results of these invocations, if any, are collected into a container and returned as bulk_invoke's result.
///
/// `bulk_invoke` creates a group of function invocations of size `N`, and each invocation i in `[0,N)` has the following form:
///
/// result_i = f(agent_i, arg_i_1, arg_i_2, ..., arg_i_M)
///
/// `agent_i` is a reference to an **execution agent** which identifies the ith invocation within the group.
/// The parameter `arg_i_j` depends on the `M` arguments `arg_j` passed to `bulk_invoke`:
/// * If `arg_j` is a **shared parameter**, then it is a reference to an object shared among all execution agents in `agent_i`'s group.
/// * Otherwise, `arg_i_j` is a copy of argument `arg_j`.
///
/// If the invocations of `f` do not return `void`, these results are collected and returned in a container `results`, whose type is implementation-defined.
/// If invocation i returns `result_i`, and this invocation's `agent_i` has index `idx_i`, then `results[idx_i]` yields `result_i`.
///
/// \param policy An execution policy describing the requirements of the execution agents created by this call to `bulk_invoke`.
/// \param f A function defining the work to be performed by execution agents.
/// \param args Additional arguments to pass to `f` when it is invoked.
/// \return `void`, if `f` has no result; otherwise, a container of `f`'s results indexed by the execution agent which produced them.
///
/// \tparam ExecutionPolicy This type must fulfill the requirements of `ExecutionPolicy`.
/// \tparam Function `Function`'s first parameter type must be `ExecutionPolicy::execution_agent_type&`.
/// The types of its additional parameters must match `Args...`.
/// \tparam Args Each type in `Args...` must match the type of the corresponding parameter of `Function`.
///
/// The following example demonstrates how to use `bulk_invoke` to print 10 "Hello, world" messages in sequence.
///
/// \include hello_lambda.cpp
///
/// Messages from agents 0 through 9 are printed in sequential order:
///
/// ~~~~
/// $ clang -std=c++11 -I. -lstdc++ -pthread examples/hello_lambda.cpp -o hello_lambda
/// $ ./hello_lambda
/// Hello, world from agent 0
/// Hello, world from agent 1
/// Hello, world from agent 2
/// Hello, world from agent 3
/// Hello, world from agent 4
/// Hello, world from agent 5
/// Hello, world from agent 6
/// Hello, world from agent 7
/// Hello, world from agent 8
/// Hello, world from agent 9
/// ~~~~
///
/// Changing the execution policy used in the call to `bulk_invoke` changes how and where the execution agents
/// will execute the provided function. This example demonstrates how to use `bulk_invoke` with `par` to execute
/// the SAXPY operation in parallel:
///
/// \include saxpy.cpp
///
/// Remember to include optimization (`-O3`, in this example) to execute fast:
///
/// $ clang -std=c++11 -I. -lstdc++ -pthread -O3 examples/saxpy.cpp -o saxpy
/// $ ./saxpy
/// OK
///
/// \see bulk_async
/// \see bulk_then
template<class ExecutionPolicy, class Function, class... Args>
__AGENCY_ANNOTATION
#ifndef DOXYGEN_SHOULD_SKIP_THIS
typename detail::enable_if_bulk_invoke_execution_policy<
  ExecutionPolicy, Function, Args...
>::type
#else
see_below
#endif
bulk_invoke(ExecutionPolicy&& policy, Function f, Args&&... args)
{
  // the agent traits describe the policy's execution agent type; its
  // execution category encodes how many levels of execution hierarchy exist
  using agent_traits = execution_agent_traits<typename std::decay<ExecutionPolicy>::type::execution_agent_type>;

  // one shared parameter is created per level of the execution hierarchy
  const size_t num_shared_params = detail::execution_depth<typename agent_traits::execution_category>::value;

  // dispatch to the implementation with two index sequences: one enumerating
  // the user's arguments and one enumerating the shared parameters
  return detail::bulk_invoke_execution_policy(detail::index_sequence_for<Args...>(), detail::make_index_sequence<num_shared_params>(), policy, f, std::forward<Args>(args)...);
}
} // end agency
<file_sep>#include <iostream>
#include <type_traits>
#include <vector>
#include <cassert>
#include <agency/execution/executor/vector_executor.hpp>
#include <agency/execution/executor/executor_traits.hpp>
// Unit test: verifies vector_executor's static traits and its
// bulk_sync_execute() behavior.
int main()
{
  using namespace agency;

  // compile-time checks of vector_executor's executor traits
  static_assert(is_bulk_synchronous_executor<vector_executor>::value,
    "vector_executor should be a bulk synchronous executor");

  static_assert(is_bulk_executor<vector_executor>::value,
    "vector_executor should be a bulk executor");

  static_assert(detail::is_detected_exact<unsequenced_execution_tag, executor_execution_category_t, vector_executor>::value,
    "vector_executor should have unsequenced_execution_tag execution_category");

  static_assert(detail::is_detected_exact<size_t, executor_shape_t, vector_executor>::value,
    "vector_executor should have size_t shape_type");

  static_assert(detail::is_detected_exact<size_t, executor_index_t, vector_executor>::value,
    "vector_executor should have size_t index_type");

  // XXX fixed: corrected typo "furture" -> "future" in the assertion message
  static_assert(detail::is_detected_exact<std::future<int>, executor_future_t, vector_executor, int>::value,
    "vector_executor should have std::future future");

  static_assert(executor_execution_depth<vector_executor>::value == 1,
    "vector_executor should have execution_depth == 1");

  // runtime check: each of the 10 agents copies element idx of the shared
  // vector (all 13s) into the result vector
  vector_executor exec;
  size_t shape = 10;

  auto result = exec.bulk_sync_execute(
    [](size_t idx, std::vector<int>& results, std::vector<int>& shared_arg)
    {
      results[idx] = shared_arg[idx];
    },
    shape,
    [=]{ return std::vector<int>(shape); },     // results
    [=]{ return std::vector<int>(shape, 13); }  // shared_arg
  );

  assert(std::vector<int>(10, 13) == result);

  std::cout << "OK" << std::endl;

  return 0;
}
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/detail/invoke.hpp>
#include <agency/detail/type_traits.hpp>
#include <agency/execution/executor/executor_traits.hpp>
#include <agency/execution/executor/customization_points/async_execute.hpp>
#include <agency/execution/executor/detail/utility/bulk_sync_execute_with_one_shared_parameter.hpp>
namespace agency
{
// this case handles executors which have .sync_execute()
// Synchronously invokes f() via the executor and returns f's result.
__agency_exec_check_disable__
template<class E, class Function,
         __AGENCY_REQUIRES(detail::SynchronousExecutor<E>())
        >
__AGENCY_ANNOTATION
detail::result_of_t<detail::decay_t<Function>()>
sync_execute(E& exec, Function&& f)
{
  // the executor natively supports single-agent synchronous execution,
  // so just forward the function to its member function
  return exec.sync_execute(std::forward<Function>(f));
}
// this case handles executors which are SimpleExecutors but do not have .sync_execute()
// Emulates synchronous execution by launching f asynchronously and
// immediately waiting on the resulting future.
__agency_exec_check_disable__
template<class E, class Function,
         __AGENCY_REQUIRES(!detail::SynchronousExecutor<E>()),
         __AGENCY_REQUIRES(detail::SimpleExecutor<E>())
        >
__AGENCY_ANNOTATION
detail::result_of_t<detail::decay_t<Function>()>
sync_execute(E& exec, Function&& f)
{
  // block on the future's result to provide synchronous semantics
  return agency::async_execute(exec, std::forward<Function>(f)).get();
}
namespace detail
{
// Adaptor which gives a nullary function f the call signature expected by
// bulk_sync_execute: (index, result, shared_parameter).
// The index and the unit shared parameter are ignored; f's result is stored
// into the result slot (as a unit when f returns void).
template<class Function>
struct sync_execute_functor
{
  mutable Function f;

  template<class Index, class Result>
  __AGENCY_ANNOTATION
  void operator()(const Index&, Result& result, unit) const
  {
    // invoke_and_return_unit_if_void_result lets void-returning functions
    // participate uniformly by substituting a unit value
    result = invoke_and_return_unit_if_void_result(f);
  }
};
} // end detail
// this case handles executors which have no way to create single-agent synchrony
// Emulates single-agent synchronous execution by launching a bulk operation
// containing exactly one agent.
__agency_exec_check_disable__
template<class E, class Function,
         __AGENCY_REQUIRES(!detail::SimpleExecutor<E>()),
         __AGENCY_REQUIRES(detail::BulkExecutor<E>())>
__AGENCY_ANNOTATION
detail::result_of_t<detail::decay_t<Function>()>
sync_execute(E& exec, Function f)
{
  using result_of_function = detail::result_of_t<Function()>;

  // if f returns void, then return a unit from bulk_sync_execute()
  using result_type = typename std::conditional<
    std::is_void<result_of_function>::value,
    detail::unit,
    result_of_function
  >::type;

  // XXX should really move f into this functor, but it's not clear how to make move-only
  //     parameters to CUDA kernels
  auto execute_me = detail::sync_execute_functor<Function>{f};

  using shape_type = executor_shape_t<E>;

  // call bulk_sync_execute() and cast to the expected result, which handles void result
  return static_cast<result_of_function>(agency::detail::bulk_sync_execute_with_one_shared_parameter(exec,
    execute_me,                          // the functor to execute
    detail::shape_cast<shape_type>(1),   // create only a single agent
    detail::construct<result_type>(),    // a factory for creating f's result
    detail::unit_factory()               // a factory for creating a unit shared parameter which execute_me will ignore
  ));
}
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/cuda/algorithm/copy_n.hpp>
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/has_member.hpp>
#include <agency/tuple.hpp>
#include <agency/detail/type_traits.hpp>
#include <agency/detail/type_list.hpp>
#include <type_traits>
#include <utility>
namespace agency
{
namespace detail
{
// detection traits for optional member types of execution agents:
// has_param_type<T>, has_shared_param_type<T>, has_inner_execution_agent_type<T>
__DEFINE_HAS_MEMBER_TYPE(has_param_type, param_type);
__DEFINE_HAS_MEMBER_TYPE(has_shared_param_type, shared_param_type);
__DEFINE_HAS_MEMBER_TYPE(has_inner_execution_agent_type, inner_execution_agent_type);

// execution_agent_type_list flattens a (possibly nested) execution agent type
// into a type_list of agents, outermost first.
// base case: an agent with no inner agent yields a single-element list
template<class ExecutionAgent, class Enable = void>
struct execution_agent_type_list
{
  using type = type_list<ExecutionAgent>;
};

// recursive case: prepend the agent to the flattened list of its inner agent
template<class ExecutionAgent>
struct execution_agent_type_list<
  ExecutionAgent,
  typename std::enable_if<
    has_inner_execution_agent_type<ExecutionAgent>::value
  >::type
>
{
  using type = typename type_list_prepend<
    ExecutionAgent,
    typename execution_agent_type_list<
      typename ExecutionAgent::inner_execution_agent_type
    >::type
  >::type;
};
// execution_agent_traits_base exposes the member type inner_execution_agent_type
// when ExecutionAgent has one (i.e. is a scoped agent); otherwise it is empty.
template<class ExecutionAgent, class Enable = void>
struct execution_agent_traits_base
{
};

template<class ExecutionAgent>
struct execution_agent_traits_base<
  ExecutionAgent,
  typename std::enable_if<
    // XXX fixed: std::enable_if requires the boolean constant ::value here,
    //     not the member ::type -- as written before, this specialization
    //     could never be selected (cf. execution_agent_type_list above,
    //     which correctly uses ::value)
    has_inner_execution_agent_type<ExecutionAgent>::value
  >::type
>
{
  using inner_execution_agent_type = typename ExecutionAgent::inner_execution_agent_type;
};
// derive from ExecutionAgent to get access to ExecutionAgent's constructor
// (agents typically make their constructors protected; deriving from the
// agent lets the library construct them on the user's behalf)
template<class ExecutionAgent>
struct agent_access_helper : public ExecutionAgent
{
  // forward all constructor arguments to the agent's constructor
  template<class... Args>
  __AGENCY_ANNOTATION
  agent_access_helper(Args&&... args)
    : ExecutionAgent(std::forward<Args>(args)...)
  {}
};
// make_agent() is a helper function used by execution_agent_traits and execution_group. its job is to simplify the job of creating an
// execution agent by calling its constructor
// make_agent() forwards index & param and filters out ignored shared parameters when necessary
// in other words, when shared_param is ignore_t, it doesn't pass shared_param to the agent's constructor
// in other cases, it forwards along the shared_param

// no shared parameter at all: construct from index & param only
template<class ExecutionAgent, class Index, class Param>
__AGENCY_ANNOTATION
ExecutionAgent make_agent(const Index& index,
                          const Param& param)
{
  return agent_access_helper<ExecutionAgent>(index, param);
}

// flat agent whose shared parameter is ignore_t: drop the shared parameter
template<class ExecutionAgent, class Index, class Param>
__AGENCY_ANNOTATION
static ExecutionAgent make_flat_agent(const Index& index,
                                      const Param& param,
                                      detail::ignore_t)
{
  return make_agent<ExecutionAgent>(index, param);
}

// flat agent with a real shared parameter: forward it to the constructor
template<class ExecutionAgent, class Index, class Param, class SharedParam>
__AGENCY_ANNOTATION
static ExecutionAgent make_flat_agent(const Index& index,
                                      const Param& param,
                                      SharedParam& shared_param)
{
  return agent_access_helper<ExecutionAgent>(index, param, shared_param);
}

// exactly one shared parameter: dispatch through make_flat_agent so that
// an ignore_t shared parameter is filtered out
template<class ExecutionAgent, class Index, class Param, class SharedParam>
__AGENCY_ANNOTATION
static ExecutionAgent make_agent(const Index& index,
                                 const Param& param,
                                 SharedParam& shared_param)
{
  return make_flat_agent<ExecutionAgent>(index, param, shared_param);
}

// two or more shared parameters (scoped agent): forward all of them
template<class ExecutionAgent, class Index, class Param, class SharedParam1, class SharedParam2, class... SharedParams>
__AGENCY_ANNOTATION
static ExecutionAgent make_agent(const Index& index,
                                 const Param& param,
                                 SharedParam1& shared_param1,
                                 SharedParam2& shared_param2,
                                 SharedParams&... shared_params)
{
  return agent_access_helper<ExecutionAgent>(index, param, shared_param1, shared_param2, shared_params...);
}
} // end detail
// execution_agent_traits provides a uniform interface to an execution agent
// type: its associated index/size/param/domain types, and an execute() entry
// point which constructs an agent and invokes a user function on it.
template<class ExecutionAgent>
struct execution_agent_traits : detail::execution_agent_traits_base<ExecutionAgent>
{
  using execution_agent_type = ExecutionAgent;
  using execution_category = typename execution_agent_type::execution_category;

  // XXX we should probably use execution_agent_type::index_type if it exists,
  // if not, use the type of the result of .index()
  using index_type = detail::decay_t<
    decltype(
      std::declval<execution_agent_type>().index()
    )
  >;

  // the type returned by the agent's group_size() member
  using size_type = detail::decay_t<
    decltype(
      std::declval<execution_agent_type>().group_size()
    )
  >;

  private:
    // names ExecutionAgent's member param_type; only instantiated when the
    // member exists (guarded by has_param_type below)
    template<class T>
    struct execution_agent_param
    {
      using type = typename T::param_type;
    };

  public:
    // use the agent's param_type if it declares one; otherwise default to size_type
    using param_type = typename detail::lazy_conditional<
      detail::has_param_type<execution_agent_type>::value,
      execution_agent_param<execution_agent_type>,
      detail::identity<size_type>
    >::type;

  // XXX what should we do if ExecutionAgent::domain(param) does not exist?
  //     default should be lattice<index_type>, but by what process should we eventually
  //     arrive at that default?
  // XXX yank the general implementation from execution_group now that param_type::inner() exists
  __agency_exec_check_disable__
  __AGENCY_ANNOTATION
  static auto domain(const param_type& param)
    -> decltype(ExecutionAgent::domain(param))
  {
    return ExecutionAgent::domain(param);
  }

  using domain_type = decltype(domain(std::declval<param_type>()));

  // construct an agent from (index, param) and invoke f on it, returning f's result
  template<class Function>
  __AGENCY_ANNOTATION
  static detail::result_of_t<Function(ExecutionAgent&)>
  execute(Function f, const index_type& index, const param_type& param)
  {
    ExecutionAgent agent(index, param);
    return f(agent);
  }

  private:
    // names ExecutionAgent's member shared_param_type; only instantiated when
    // the member exists (guarded by has_shared_param_type below)
    template<class T>
    struct execution_agent_shared_param
    {
      using type = typename T::shared_param_type;
    };

  public:
    // use the agent's shared_param_type if it declares one; otherwise default
    // to ignore_t, which make_agent() filters out
    using shared_param_type = typename detail::lazy_conditional<
      detail::has_shared_param_type<execution_agent_type>::value,
      execution_agent_shared_param<execution_agent_type>,
      detail::identity<detail::ignore_t>
    >::type;

  // XXX we should ensure that the SharedParams are all the right type for each inner execution agent type
  //     basically, they would be the element types of shared_param_tuple_type
  template<class Function, class SharedParam1, class... SharedParams>
  __AGENCY_ANNOTATION
  static detail::result_of_t<Function(ExecutionAgent&)>
  execute(Function f, const index_type& index, const param_type& param, SharedParam1& shared_param1, SharedParams&... shared_params)
  {
    ExecutionAgent agent = detail::make_agent<ExecutionAgent>(index, param, shared_param1, shared_params...);
    return f(agent);
  }
};
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/experimental/ranges/range_traits.hpp>
#include <iterator>
namespace agency
{
namespace experimental
{
// A non-owning view over the n elements beginning at a given iterator.
// Models a random-access range; copying the view does not copy the elements.
template<class Iterator, class Difference = typename std::iterator_traits<Iterator>::difference_type>
class counted_view
{
  public:
    using iterator = Iterator;
    using difference_type = Difference;

    // construct a view over the n elements starting at iter
    __AGENCY_ANNOTATION
    counted_view(iterator iter, difference_type n)
      : first_(iter), count_(n)
    {}

    __AGENCY_ANNOTATION
    iterator begin() const
    {
      return first_;
    }

    __AGENCY_ANNOTATION
    iterator end() const
    {
      iterator result = first_;
      result += count_;
      return result;
    }

    __AGENCY_ANNOTATION
    difference_type size() const
    {
      return count_;
    }

    __AGENCY_ANNOTATION
    bool empty() const
    {
      return count_ == difference_type(0);
    }

    // element access; valid for i in [0, size())
    __AGENCY_ANNOTATION
    typename std::iterator_traits<iterator>::reference operator[](difference_type i) const
    {
      return *(first_ + i);
    }

  private:
    iterator first_;         // start of the viewed range
    difference_type count_;  // number of elements in the view
};
// all() customization for counted_view: a view of a view is the view itself
template<class Iterator, class Difference>
__AGENCY_ANNOTATION
counted_view<Iterator,Difference> all(const counted_view<Iterator,Difference>& v)
{
  return v;
}
// counted() creates a counted_view over the first n elements of rng,
// using the range's own difference_type
template<class Range>
__AGENCY_ANNOTATION
counted_view<range_iterator_t<Range>,range_difference_t<Range>>
counted(Range&& rng, range_difference_t<Range> n)
{
  return counted_view<range_iterator_t<Range>,range_difference_t<Range>>(rng.begin(), n);
}

// as above, but with an explicitly-chosen Difference type for the view
template<class Difference, class Range>
__AGENCY_ANNOTATION
counted_view<range_iterator_t<Range>,Difference>
counted(Range&& rng, Difference n)
{
  return counted_view<range_iterator_t<Range>,Difference>(rng.begin(), n);
}

// view of n elements starting at offset `from` within rng
template<class Difference, class Range>
__AGENCY_ANNOTATION
counted_view<range_iterator_t<Range>,Difference>
counted(Range&& rng, range_difference_t<Range> from, Difference n)
{
  return counted_view<range_iterator_t<Range>,Difference>(rng.begin() + from, n);
}
} // end experimental
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/execution/executor/executor_array.hpp>
#include <agency/execution/executor/flattened_executor.hpp>
#include <agency/execution/executor/executor_traits.hpp>
#include <agency/cuda/execution/executor/grid_executor.hpp>
#include <agency/cuda/execution/executor/parallel_executor.hpp>
#include <numeric>
#include <algorithm>
#include <type_traits>
#include <array>
namespace agency
{
namespace cuda
{
namespace detail
{
template<class Range>
std::vector<grid_executor> devices_to_grid_executors(const Range& devices)
{
std::vector<grid_executor> result(devices.size());
std::transform(devices.begin(), devices.end(), result.begin(), [](const device_id& d)
{
return grid_executor(d);
});
return result;
}
// Returns one grid_executor for every device reported by all_devices().
// XXX fixed: marked inline because this non-template function is defined in
//     a header; without inline, including this header from more than one
//     translation unit violates the one-definition rule and produces
//     multiple-definition link errors
inline std::vector<grid_executor> all_devices_as_grid_executors()
{
  return detail::devices_to_grid_executors(all_devices());
}
} // end detail
// supergrid_executor composes a collection of grid_executors (typically one
// per device) into a single executor_array driven by a parallel executor.
class supergrid_executor : public executor_array<grid_executor, this_thread::parallel_executor>
{
  private:
    using super_t = executor_array<grid_executor, this_thread::parallel_executor>;

  public:
    // default-construct with one grid_executor per system device
    supergrid_executor()
      : supergrid_executor(detail::all_devices_as_grid_executors())
    {}

    // construct from an explicit range of grid_executors
    template<class Range>
    supergrid_executor(const Range& grid_executors)
      : super_t(grid_executors.begin(), grid_executors.end())
    {}
};
// spanning_grid_executor flattens a supergrid_executor by one level, so a
// single launch spans all of the underlying grid executors.
class spanning_grid_executor : public flattened_executor<supergrid_executor>
{
  private:
    using super_t = flattened_executor<supergrid_executor>;

  public:
    spanning_grid_executor() = default;

    // construct from a range of grid_executors
    template<class Range>
    spanning_grid_executor(const Range& grid_executors)
      : super_t(supergrid_executor(grid_executors))
    {}
};

static_assert(is_executor<spanning_grid_executor>::value, "spanning_grid_executor is not an executor!");
// multidevice_executor flattens a spanning_grid_executor by one further
// level; it can be constructed either from a range of grid_executors or from
// a range of device_ids.
class multidevice_executor : public flattened_executor<spanning_grid_executor>
{
  private:
    using super_t = flattened_executor<spanning_grid_executor>;

  public:
    multidevice_executor() = default;

    // construct from a range of grid_executors
    template<class Range, __AGENCY_REQUIRES(!detail::is_range_of_device_id<Range>::value)>
    multidevice_executor(const Range& grid_executors)
      : super_t(grid_executors)
    {}

    // construct from a range of device_ids by first converting them to executors
    template<class Range, __AGENCY_REQUIRES(detail::is_range_of_device_id<Range>::value)>
    multidevice_executor(const Range& devices)
      : multidevice_executor(detail::devices_to_grid_executors(devices))
    {}

    // the number of underlying grid_executors (i.e., devices) spanned
    size_t size() const
    {
      return this->base_executor().base_executor().size();
    }
};

static_assert(is_executor<multidevice_executor>::value, "multidevice_executor is not an executor!");
} // end cuda
} // end agency
<file_sep>/// \file
/// \brief Contains definitions of built-in execution policies.
///
/// \defgroup execution_policies Execution Policies
/// \ingroup execution
/// \brief Execution policies describe requirements for execution.
///
/// Execution policies describe the execution properties of bulk tasks created by control structures such as `bulk_invoke()`.
/// Such properties include both *how* and *where* execution should occur. Forward progress requirements encapsulated by
/// execution policies describe the ordering relationships of individual execution agents comprising a bulk task, while the execution policy's
/// associated *executor* governs where those execution agents execute.
///
/// ### Essential Characteristics
///
/// An execution policy collects two essential characteristics: a type of execution agent defining execution requirements,
/// and an associated executor which creates execution with prescribed guarantees. When combined with control structures
/// like bulk_invoke(), the associated executor creates execution and the characteristics of this execution are reified
/// in the program as execution agent objects.
///
/// ### Parameterization
///
/// Aside from these characteristics, execution policy objects also encapsulate a *parameterization* describing
/// the group of execution agents to create when composed with a control structure. For most of Agency's execution agent types,
/// these parameters define the range of indices assigned to agents in the group.
///
/// An existing instance of an execution policy may be called like a function to produce an instance with a
/// different parameterization. For example, the execution policy agency::seq may be called like a function to create a new
/// policy with a different parameterization:
///
/// ~~~~{.cpp}
/// // call seq like a function to produce an execution policy generating 13 agents
/// agency::bulk_invoke(agency::seq(13), [](agency::sequenced_agent& self)
/// {
/// std::cout << self.index() << std::endl;
/// });
///
/// // the integers [0,13) are printed in sequence
/// ~~~~
///
/// Alternatively, we can shift the origin of the group by passing agency::seq a half-open range:
///
/// ~~~~{.cpp}
/// agency::bulk_invoke(agency::seq(10,23), [](agency::sequenced_agent& self)
/// {
/// std::cout << self.index() << std::endl;
/// });
///
/// // the integers [10,23) are printed in sequence
/// ~~~~
///
/// ### The associated executor
///
/// Each of Agency's execution policies have an associated executor. The member function `.executor()` provides access to this executor:
///
/// ~~~~{.cpp}
/// // make a copy of par's associated executor
/// agency::parallel_executor par_exec = agency::par.executor();
/// ~~~~
///
/// The type of an execution policy's associated executor is named by the member type `executor_type`. Generic contexts such as templates may use this type:
///
/// ~~~~{.cpp}
/// template<class ExecutionPolicy>
/// void foo(ExecutionPolicy& policy)
/// {
/// // use the member type executor_type to make a copy of policy's associated executor
/// typename ExecutionPolicy::executor_type exec1 = policy.executor();
///
/// // alternatively, use auto
/// auto exec2 = policy.executor();
///
/// ...
/// }
/// ~~~~
///
/// ### Replacing an executor with `.on()`
///
/// An existing execution policy's associated executor may be *replaced* with the `.on()` member function. `.on()`
/// creates a new execution policy object whose associated executor is a copy of the given executor:
///
/// ~~~~{.cpp}
/// // suppose I have some existing executor
/// agency::sequenced_executor my_executor;
///
/// // associate my_executor with a new policy derived from agency::par
/// auto new_policy = agency::par.on(my_executor);
///
/// // now all execution generated by new_policy will be created "on" my_executor
/// ~~~~
#pragma once
#include <utility>
#include <functional>
#include <type_traits>
#include <functional>
#include <memory>
#include <tuple>
#include <initializer_list>
#include <agency/execution/execution_policy/basic_execution_policy.hpp>
#include <agency/execution/execution_policy/concurrent_execution_policy.hpp>
#include <agency/execution/execution_policy/execution_policy_traits.hpp>
#include <agency/execution/execution_policy/parallel_execution_policy.hpp>
#include <agency/execution/execution_policy/sequenced_execution_policy.hpp>
#include <agency/execution/execution_policy/unsequenced_execution_policy.hpp>
// XXX the stuff defined down there should be moved into separate headers
namespace agency
{
namespace experimental
{
namespace detail
{
// basic_static_execution_policy rebinds an existing execution policy's agent
// type to a "static" agent whose group size and grain size are compile-time
// constants, while reusing the original policy's executor type
template<class ExecutionPolicy, std::size_t group_size,
         std::size_t grain_size = 1,
         class ExecutionAgent = basic_static_execution_agent<
           agency::detail::execution_policy_agent_t<ExecutionPolicy>,
           group_size,
           grain_size
         >,
         class Executor = agency::detail::execution_policy_executor_t<ExecutionPolicy>>
using basic_static_execution_policy = basic_execution_policy<
  ExecutionAgent,
  Executor
>;
} // end detail
// A sequenced execution policy whose group size and grain size are fixed at
// compile time via template parameters.
template<size_t group_size, size_t grain_size = 1>
class static_sequenced_execution_policy : public detail::basic_static_execution_policy<agency::sequenced_execution_policy, group_size, grain_size>
{
  private:
    using super_t = detail::basic_static_execution_policy<agency::sequenced_execution_policy, group_size, grain_size>;

  public:
    // inherit the base policy's constructors
    using super_t::super_t;
};
// A concurrent execution policy whose group size and grain size are fixed at
// compile time; unlike the sequenced variant, it also substitutes the
// specialized static_concurrent_agent as the agent type.
template<size_t group_size, size_t grain_size = 1>
class static_concurrent_execution_policy : public detail::basic_static_execution_policy<
  agency::concurrent_execution_policy,
  group_size,
  grain_size,
  static_concurrent_agent<group_size, grain_size>
>
{
  private:
    using super_t = detail::basic_static_execution_policy<
      agency::concurrent_execution_policy,
      group_size,
      grain_size,
      static_concurrent_agent<group_size, grain_size>
    >;

  public:
    // inherit the base policy's constructors
    using super_t::super_t;
};
} // end experimental
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/cuda/execution/executor/detail/basic_grid_executor.hpp>
#include <agency/execution/execution_categories.hpp>
#include <agency/coordinate/point.hpp>
namespace agency
{
namespace cuda
{
// concurrent_grid_executor is a CUDA grid executor whose execution category
// is concurrent_execution_tag and whose shape type is a 2D (grid, block)
// pair of unsigned ints.
class concurrent_grid_executor : public detail::basic_grid_executor<concurrent_execution_tag, agency::uint2>
{
  private:
    using super_t = detail::basic_grid_executor<concurrent_execution_tag, agency::uint2>;

  public:
    using super_t::super_t;

    // the executor's "natural" launch shape: one block per multiprocessor,
    // 256 threads per block
    // NOTE(review): 256 appears to be a heuristic block size -- confirm it is
    // valid for concurrent launches on all supported devices
    __host__ __device__
    shape_type unit_shape() const
    {
      return shape_type{detail::number_of_multiprocessors(device()), 256};
    }

    using super_t::max_shape_dimensions;

    // XXX does any part of Agency actually use this function? maybe we should just get rid of it
    //     in favor of something that works more like the overload below
    __host__ __device__
    shape_type max_shape_dimensions() const
    {
      // XXX it's not clear that this is correct
      return shape_type{detail::maximum_grid_size_x(device()), detail::maximum_block_size_x(device())};
    }
};
} // end cuda
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/detail/algorithm/uninitialized_copy.hpp>
#include <agency/detail/iterator/iterator_traits.hpp>
namespace agency
{
namespace detail
{
namespace overlapped_uninitialized_copy_detail
{
// Copies [first, last) into uninitialized storage ending at `result`
// (i.e. `result` points one past the end of the destination range),
// constructing elements with placement new.
// The copy proceeds from the back to the front so that it is safe when the
// destination overlaps [first, last) with result > first.
// Returns an iterator to the beginning of the destination range.
// XXX fixed: the loop previously never advanced `first` or `result`
//     (an infinite loop), and it copied forward even though the caller
//     passes the destination *end* iterator; it now predecrements both
//     iterators, copying backward as the function's name requires
template<class Iterator1, class Iterator2>
__AGENCY_ANNOTATION
Iterator2 uninitialized_copy_backward(Iterator1 first, Iterator1 last, Iterator2 result)
{
  using value_type = typename std::iterator_traits<Iterator2>::value_type;

  // yes, we predecrement
  // the ranges are open on the right, i.e. [first, last)
  while(first != last)
  {
    --last;
    --result;
    new(&*result) value_type(*last);
  }

  return result;
}
} // end overlapped_uninitialized_copy_detail
// Copies [first, last) into possibly-overlapping uninitialized storage at
// result, using the given execution policy for the non-overlapping case.
// This overload handles non-sequenced policies over random-access iterators.
template<class ExecutionPolicy, class Iterator,
         __AGENCY_REQUIRES(
           is_execution_policy<typename std::decay<ExecutionPolicy>::type>::value
         ),
         __AGENCY_REQUIRES(
           !policy_is_sequenced<decay_t<ExecutionPolicy>>::value and
           iterator_is_random_access<Iterator>::value
         )>
__AGENCY_ANNOTATION
Iterator overlapped_uninitialized_copy(ExecutionPolicy&& policy, Iterator first, Iterator last, Iterator result)
{
  if(first < last && first <= result && result < last)
  {
    // result lies in [first, last)
    // it's safe to use uninitialized_copy_backward here
    // note: the backward copy is inherently sequential, so the policy is not used
    overlapped_uninitialized_copy_detail::uninitialized_copy_backward(first, last, result + (last - first));
    result += (last - first);
  }
  else
  {
    // result + (last - first) lies in [first, last)
    // it's safe to use uninitialized_copy here
    result = agency::detail::uninitialized_copy(std::forward<ExecutionPolicy>(policy), first, last, result);
  } // end else

  // return the end of the destination range
  return result;
}
// As above, but for sequenced policies or non-random-access iterators;
// the incoming policy is ignored and a sequenced policy is used instead.
template<class ExecutionPolicy, class Iterator,
         __AGENCY_REQUIRES(
           is_execution_policy<typename std::decay<ExecutionPolicy>::type>::value
         ),
         __AGENCY_REQUIRES(
           policy_is_sequenced<decay_t<ExecutionPolicy>>::value or
           !iterator_is_random_access<Iterator>::value
         )>
__AGENCY_ANNOTATION
Iterator overlapped_uninitialized_copy(ExecutionPolicy&&, Iterator first, Iterator last, Iterator result)
{
  if(first < last && first <= result && result < last)
  {
    // result lies in [first, last)
    // it's safe to use uninitialized_copy_backward here
    overlapped_uninitialized_copy_detail::uninitialized_copy_backward(first, last, result + (last - first));
    result += (last - first);
  }
  else
  {
    // result + (last - first) lies in [first, last)
    // it's safe to use uninitialized_copy here
    agency::sequenced_execution_policy seq;
    result = agency::detail::uninitialized_copy(seq, first, last, result);
  } // end else

  // return the end of the destination range
  return result;
}
// Convenience overload without a policy: always executes sequentially.
template<class Iterator>
__AGENCY_ANNOTATION
Iterator overlapped_uninitialized_copy(Iterator first, Iterator last, Iterator result)
{
  // pass this instead of agency::seq to work around the prohibition on
  // taking the address of a global constexpr object (i.e., agency::seq) from a CUDA __device__ function
  agency::sequenced_execution_policy seq;
  return detail::overlapped_uninitialized_copy(seq, first, last, result);
}
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/type_traits.hpp>
#include <agency/detail/type_list.hpp>
#include <agency/detail/control_structures/executor_functions/bulk_then_executor.hpp>
#include <agency/detail/control_structures/decay_parameter.hpp>
#include <agency/detail/control_structures/single_result.hpp>
#include <agency/detail/control_structures/shared_parameter.hpp>
#include <agency/detail/control_structures/tuple_of_agent_shared_parameter_factories.hpp>
#include <agency/detail/control_structures/bulk_invoke_execution_policy.hpp>
#include <agency/detail/is_call_possible.hpp>
#include <agency/execution/execution_agent.hpp>
#include <agency/execution/executor/executor_traits/executor_shape.hpp>
#include <agency/execution/executor/executor_traits/detail/executor_barrier_types_as_scoped_in_place_type.hpp>
#include <agency/execution/execution_policy/execution_policy_traits.hpp>
#include <agency/tuple.hpp>
#include <utility>
namespace agency
{
namespace detail
{
// then_execute_agent_functor adapts a user function f (which takes an
// execution agent) into the functor shape expected by an executor's
// then_execute(): (executor_index, [past_arg,] shared_params..., user_args...).
// It translates executor indices into agent indices, constructs the agent,
// and forwards the predecessor future's value (when non-void) plus the
// user's arguments to f.
template<class Executor, class AgentTraits, class Function, class Future, size_t... UserArgIndices>
struct then_execute_agent_functor
{
  // XXX should just make the future's value_type a parameter of this functor and try to use it SFINAE the operator()s below
  using agent_type = typename AgentTraits::execution_agent_type;
  using agent_param_type = typename AgentTraits::param_type;
  using agent_domain_type = typename AgentTraits::domain_type;
  using agent_shape_type = decltype(std::declval<agent_domain_type>().shape());
  using agent_execution_category = typename AgentTraits::execution_category;
  using executor_shape_type = executor_shape_t<Executor>;

  agent_param_type agent_param_;       // parameterization of the agent group
  agent_shape_type agent_shape_;       // shape of the agent index space
  executor_shape_type executor_shape_; // shape of the executor index space
  Function f_;                         // the user's function

  using agent_index_type = typename AgentTraits::index_type;
  using executor_index_type = executor_index_t<Executor>;

  // expand the tuple of shared parameters into individual arguments of
  // AgentTraits::execute()
  template<class OtherFunction, class Tuple, size_t... Indices>
  __AGENCY_ANNOTATION
  static result_of_t<OtherFunction(agent_type&)>
  unpack_shared_params_and_execute(OtherFunction f, const agent_index_type& index, const agent_param_type& param, Tuple&& shared_params, detail::index_sequence<Indices...>)
  {
    return AgentTraits::execute(f, index, param, agency::get<Indices>(std::forward<Tuple>(shared_params))...);
  }

  // this overload of operator() handles the case where the Future given to then_execute() is non-void
  template<class PastArg, class... Args,
           class Future1 = Future,
           class = typename std::enable_if<
             is_non_void_future<Future1>::value
           >::type>
  __AGENCY_ANNOTATION
  result_of_continuation_t<Function, agent_type&, Future, pack_element_t<UserArgIndices, Args...>...>
  operator()(const executor_index_type& executor_idx, PastArg& past_arg, Args&&... args)
  {
    // collect all parameters into a tuple of references
    auto args_tuple = agency::forward_as_tuple(std::forward<Args>(args)...);

    // split the parameters into user parameters and agent parameters
    auto user_args = detail::tuple_take_view<sizeof...(UserArgIndices)>(args_tuple);
    auto agent_shared_args = detail::tuple_drop_view<sizeof...(UserArgIndices)>(args_tuple);

    // turn the executor index into an agent index
    auto agent_idx = detail::index_cast<agent_index_type>(executor_idx, executor_shape_, agent_shape_);

    // AgentTraits::execute expects a function whose only parameter is agent_type
    // so we have to wrap f_ into a function of one parameter
    auto invoke_f = [&past_arg,&user_args,this](agent_type& self)
    {
      // invoke f by passing the agent, then the past_arg, then the user's parameters
      return f_(self, past_arg, agency::get<UserArgIndices>(user_args)...);
    };

    return this->unpack_shared_params_and_execute(invoke_f, agent_idx, agent_param_, agent_shared_args, detail::make_tuple_indices(agent_shared_args));
  }

  // this overload of operator() handles the case where the Future given to then_execute() is void
  // it is identical to the one above except that past_arg does not exist
  template<class... Args,
           class Future1 = Future,
           class = typename std::enable_if<
             is_void_future<Future1>::value
           >::type>
  __AGENCY_ANNOTATION
  result_of_continuation_t<Function, agent_type&, Future, pack_element_t<UserArgIndices, Args...>...>
  operator()(const executor_index_type& executor_idx, Args&&... args)
  {
    // collect all parameters into a tuple of references
    auto args_tuple = agency::forward_as_tuple(std::forward<Args>(args)...);

    // split the parameters into user parameters and agent parameters
    auto user_args = detail::tuple_take_view<sizeof...(UserArgIndices)>(args_tuple);
    auto agent_shared_args = detail::tuple_drop_view<sizeof...(UserArgIndices)>(args_tuple);

    // turn the executor index into an agent index
    auto agent_idx = detail::index_cast<agent_index_type>(executor_idx, executor_shape_, agent_shape_);

    // AgentTraits::execute expects a function whose only parameter is agent_type
    // so we have to wrap f_ into a function of one parameter
    auto invoke_f = [&user_args,this](agent_type& self)
    {
      // invoke f by passing the agent, then the user's parameters
      return f_(self, agency::get<UserArgIndices>(user_args)...);
    };

    return this->unpack_shared_params_and_execute(invoke_f, agent_idx, agent_param_, agent_shared_args, detail::make_tuple_indices(agent_shared_args));
  }
};
// metafunction computing the result type of bulk_then() when invoked through an
// ExecutionPolicy: a future (of the policy's associated future template) whose
// value is the collected result of invoking Function for each agent
template<class ExecutionPolicy, class Function, class Future, class... Args>
struct bulk_then_execution_policy_result
{
// figure out the Future's value_type
using future_value_type = typename future_traits<Future>::value_type;
// avoid passing Future to bulk_invoke_execution_policy_result when it is a void Future
using bulk_invoke_result_type = typename detail::lazy_conditional<
std::is_void<future_value_type>::value,
bulk_invoke_execution_policy_result<ExecutionPolicy,Function,Args...>,
bulk_invoke_execution_policy_result<ExecutionPolicy,Function,Future,Args...>
>::type;
// wrap the result type in the policy's future template
using type = execution_policy_future_t<
ExecutionPolicy,
bulk_invoke_result_type
>;
};
// convenience alias for the nested ::type of bulk_then_execution_policy_result
template<class ExecutionPolicy, class Function, class Future, class... Args>
using bulk_then_execution_policy_result_t = typename bulk_then_execution_policy_result<ExecutionPolicy,Function,Future,Args...>::type;
// bulk_then_execution_policy() implements the bulk_then() control structure for
// execution policies: after fut completes, it creates the group of execution
// agents described by policy and invokes f once per agent
//
// UserArgIndices...:   positions of the user's trailing arguments within args...
// SharedArgIndices...: hierarchy levels that receive agent shared parameters
// returns:             a future containing the collected results of f
template<size_t... UserArgIndices, size_t... SharedArgIndices, class ExecutionPolicy, class Function, class Future, class... Args>
__AGENCY_ANNOTATION
bulk_then_execution_policy_result_t<
ExecutionPolicy, Function, Future, Args...
>
bulk_then_execution_policy(index_sequence<UserArgIndices...>,
index_sequence<SharedArgIndices...>,
ExecutionPolicy& policy, Function f, Future& fut, Args&&... args)
{
using agent_type = typename ExecutionPolicy::execution_agent_type;
using agent_traits = execution_agent_traits<agent_type>;
// get the parameters of the agent
auto param = policy.param();
auto agent_shape = agent_traits::domain(param).shape();
using executor_type = typename ExecutionPolicy::executor_type;
// get a list of barrier types to create as a scoped_in_place_type_t
executor_barrier_types_as_scoped_in_place_type_t<executor_type> barriers;
// this is a tuple of factories
// each factory in the tuple creates the execution agent's shared parameter at the corresponding hierarchy level
auto tuple_of_factories = detail::make_tuple_of_agent_shared_parameter_factories<agent_type>(param, barriers);
// convert the shape of the agent into the type of the executor's shape
using executor_shape_type = executor_shape_t<executor_type>;
executor_shape_type executor_shape = detail::shape_cast<executor_shape_type>(agent_shape);
// create the function that will marshal parameters received from bulk_invoke(executor) and execute the agent
auto lambda = then_execute_agent_functor<executor_type,agent_traits,Function,Future,UserArgIndices...>{param, agent_shape, executor_shape, f};
// lower onto the executor-level bulk_then, attaching one shared-parameter
// factory per requested hierarchy level after the user's own arguments
return detail::bulk_then_executor(
policy.executor(),
executor_shape,
lambda,
fut,
std::forward<Args>(args)...,
agency::share_at_scope_from_factory<SharedArgIndices>(agency::get<SharedArgIndices>(tuple_of_factories))...
);
}
// SFINAE detector for whether bulk_then(policy, f, fut, args...) is well-formed:
// the policy must have an execution agent type and f must be callable with the
// agent plus the future's value (when non-void) plus the decayed user arguments
template<class ExecutionPolicy, class Function, class Future, class... Args>
struct is_bulk_then_possible_via_execution_policy_impl
{
template<class ExecutionPolicy1, class Function1, class Future1, class... Args1,
class = typename std::enable_if<
has_execution_agent_type<ExecutionPolicy1>::value
>::type,
class = typename std::enable_if<
is_non_void_future<Future1>::value
>::type,
class = typename enable_if_call_possible<
void, Function1, execution_policy_agent_t<ExecutionPolicy1>&, decay_parameter_t<Future1>, decay_parameter_t<Args1>...
>::type
>
static std::true_type test_non_void(int);
// fallback selected when any of the constraints above fail
template<class...>
static std::false_type test_non_void(...);
template<class ExecutionPolicy1, class Function1, class Future1, class... Args1,
class = typename std::enable_if<
has_execution_agent_type<ExecutionPolicy1>::value
>::type,
class = typename std::enable_if<
is_void_future<Future1>::value
>::type,
class = typename enable_if_call_possible<
void, Function1, execution_policy_agent_t<ExecutionPolicy1>&, decay_parameter_t<Args1>...
>::type
>
static std::true_type test_void(int);
// fallback selected when any of the constraints above fail
template<class...>
static std::false_type test_void(...);
// there are two tests: one applies when Future is a void future
using test_void_result = decltype(test_void<ExecutionPolicy,Function,Future,Args...>(0));
// the other applies when Future is a non-void future
using test_non_void_result = decltype(test_non_void<ExecutionPolicy,Function,Future,Args...>(0));
// if either test passed, then the result is true
using type = detail::disjunction<test_void_result,test_non_void_result>;
};
// convenience alias for the detector's result type
template<class ExecutionPolicy, class Function, class Future, class... Args>
using is_bulk_then_possible_via_execution_policy = typename is_bulk_then_possible_via_execution_policy_impl<ExecutionPolicy,Function,Future,Args...>::type;
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/control_structures/bind.hpp>
#include <agency/execution/executor/executor_traits.hpp>
#include <agency/execution/executor/customization_points/async_execute.hpp>
#include <agency/execution/executor/parallel_executor.hpp>
#include <agency/detail/type_traits.hpp>
#include <utility>
namespace agency
{
// async() submits f(args...) for asynchronous execution on the given executor
// and returns a future (of the executor's associated future template) holding
// the eventual result of the call
template<class Executor, class Function, class... Args>
__AGENCY_ANNOTATION
executor_future_t<
  Executor,
  detail::result_of_t<
    typename std::decay<Function&&>::type(typename std::decay<Args&&>::type...)
  >
>
async(Executor& exec, Function&& f, Args&&... args)
{
  // package the function and its arguments into a single nullary callable and
  // hand it to the executor via the async_execute() customization point
  return agency::async_execute(exec, detail::bind(std::forward<Function>(f), std::forward<Args>(args)...));
}
// convenience overload of async() used when no executor is supplied:
// executes f(args...) on a default-constructed thread pool executor
template<class Function, class... Args>
executor_future_t<
  agency::detail::thread_pool_executor,
  detail::result_of_t<
    typename std::decay<Function&&>::type(typename std::decay<Args&&>::type...)
  >
>
async(Function&& f, Args&&... args)
{
  // delegate to the executor-taking overload with a fresh thread pool executor
  agency::detail::thread_pool_executor default_exec;
  return agency::async(default_exec, std::forward<Function>(f), std::forward<Args>(args)...);
}
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/execution/executor/customization_points/bulk_sync_execute.hpp>
#include <agency/execution/executor/executor_traits.hpp>
#include <agency/detail/factory.hpp>
#include <agency/detail/invoke.hpp>
#include <agency/detail/integer_sequence.hpp>
namespace agency
{
namespace detail
{
namespace bulk_sync_execute_with_one_shared_parameter_detail
{
// adapts a three-argument function so that it may be invoked with any number
// of trailing shared parameters: only the index, result, and outermost shared
// argument are forwarded to f; the remaining arguments are silently dropped
template<class Function>
struct ignore_trailing_shared_parameters_and_invoke
{
  // mutable so that a non-const f may be invoked through our const operator()
  mutable Function f;

  template<class Index, class Result, class SharedArg, class... IgnoredArgs>
  __AGENCY_ANNOTATION
  void operator()(const Index& index, Result& collected_result, SharedArg& outer_shared_arg, IgnoredArgs&...) const
  {
    // forward only the three leading arguments to the wrapped function
    agency::detail::invoke(f, index, collected_result, outer_shared_arg);
  }
};
// each inner level of the execution hierarchy receives a factory whose
// produced value is ignored by the invoked function
template<size_t>
using factory_returning_ignored_result = agency::detail::unit_factory;
// implementation helper: Indices... enumerates the executor's inner hierarchy
// levels (there are execution_depth - 1 of them), one ignored factory apiece
template<size_t... Indices, class E, class Function, class ResultFactory, class SharedFactory>
__AGENCY_ANNOTATION
result_of_t<ResultFactory()>
bulk_sync_execute_with_one_shared_parameter_impl(index_sequence<Indices...>,
E& exec, Function f, executor_shape_t<E> shape, ResultFactory result_factory, SharedFactory shared_factory)
{
bulk_sync_execute_with_one_shared_parameter_detail::ignore_trailing_shared_parameters_and_invoke<Function> execute_me{f};
return agency::bulk_sync_execute(exec,
execute_me, // the functor to execute
shape, // the number of agents to create
result_factory, // the factory to create the result
shared_factory, // the factory to create the shared parameter
factory_returning_ignored_result<Indices>()... // pass a factory for each inner level of execution hierarchy. the results of these factories will be ignored
);
}
} // end bulk_sync_execute_with_one_shared_parameter_detail
// executes f in bulk on exec with a result object created by result_factory and
// a single shared parameter, created by shared_factory, at the outermost level
// of the execution hierarchy; inner levels receive discardable placeholders
// returns the object produced by result_factory after all agents complete
template<class E, class Function, class ResultFactory, class SharedFactory,
__AGENCY_REQUIRES(BulkExecutor<E>())
>
__AGENCY_ANNOTATION
result_of_t<ResultFactory()>
bulk_sync_execute_with_one_shared_parameter(E& exec, Function f, executor_shape_t<E> shape, ResultFactory result_factory, SharedFactory shared_factory)
{
return bulk_sync_execute_with_one_shared_parameter_detail::bulk_sync_execute_with_one_shared_parameter_impl(
// one ignored factory is generated per inner hierarchy level
detail::make_index_sequence<executor_execution_depth<E>::value - 1>(),
exec,
f,
shape,
result_factory,
shared_factory
);
}
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/execution/executor/executor_traits.hpp>
#include <agency/detail/integer_sequence.hpp>
#include <agency/tuple.hpp>
#include <agency/execution/executor/detail/utility/executor_bulk_result_or_void.hpp>
#include <agency/execution/executor/detail/utility/bulk_sync_execute_with_void_result.hpp>
#include <agency/execution/executor/detail/utility/bulk_sync_execute_with_collected_result.hpp>
#include <agency/detail/control_structures/executor_functions/bind_agent_local_parameters.hpp>
#include <agency/detail/control_structures/executor_functions/unpack_shared_parameters_from_executor_and_invoke.hpp>
#include <agency/detail/control_structures/executor_functions/result_factory.hpp>
#include <agency/detail/control_structures/scope_result.hpp>
#include <agency/detail/control_structures/decay_parameter.hpp>
#include <agency/detail/type_traits.hpp>
#include <type_traits>
namespace agency
{
namespace detail
{
// this overload handles the general case where the user function returns a non-void result
// the tuple of shared-parameter factories is expanded into individual arguments
// via TupleIndices... and the collected results are returned
template<class Executor, class Function, class ResultFactory, class Tuple, size_t... TupleIndices>
__AGENCY_ANNOTATION
result_of_t<ResultFactory()>
bulk_invoke_executor_impl(Executor& exec,
Function f,
ResultFactory result_factory,
executor_shape_t<Executor> shape,
Tuple&& shared_factory_tuple,
detail::index_sequence<TupleIndices...>)
{
return detail::bulk_sync_execute_with_collected_result(exec, f, shape, result_factory, agency::get<TupleIndices>(std::forward<Tuple>(shared_factory_tuple))...);
}
// this overload handles the special case where the user function returns void
// it is selected via the unnamed void_factory tag parameter and produces no result
template<class Executor, class Function, class Tuple, size_t... TupleIndices>
__AGENCY_ANNOTATION
void bulk_invoke_executor_impl(Executor& exec,
Function f,
void_factory,
executor_shape_t<Executor> shape,
Tuple&& factory_tuple,
detail::index_sequence<TupleIndices...>)
{
return detail::bulk_sync_execute_with_void_result(exec, f, shape, agency::get<TupleIndices>(std::forward<Tuple>(factory_tuple))...);
}
// computes the result type of bulk_invoke(executor)
template<class Executor, class Function, class... Args>
struct bulk_invoke_executor_result
{
// first figure out what type the user function returns
using user_function_result = result_of_t<
Function(executor_index_t<Executor>, decay_parameter_t<Args>...)
>;
// if the user function returns scope_result, then use scope_result_to_bulk_invoke_result to figure out what to return
// else, the result is whatever executor_bulk_result_or_void<Executor, function_result> thinks it is
using type = typename lazy_conditional<
is_scope_result<user_function_result>::value,
scope_result_to_bulk_invoke_result<user_function_result, Executor>,
executor_bulk_result_or_void<Executor, user_function_result>
>::type;
};
// convenience alias for the nested ::type of bulk_invoke_executor_result
template<class Executor, class Function, class... Args>
using bulk_invoke_executor_result_t = typename bulk_invoke_executor_result<Executor,Function,Args...>::type;
// bulk_invoke_executor() creates a group of shape invocations of f on exec and
// returns the collected results (or void, per bulk_invoke_executor_result_t)
// args... may contain shared parameters, which are routed to the corresponding
// level of the executor's execution hierarchy
template<class Executor, class Function, class... Args>
__AGENCY_ANNOTATION
bulk_invoke_executor_result_t<Executor, Function, Args...>
bulk_invoke_executor(Executor& exec, executor_shape_t<Executor> shape, Function f, Args&&... args)
{
// the _1 is for the executor idx parameter, which is the first parameter passed to f
auto g = detail::bind_agent_local_parameters_workaround_nvbug1754712(std::integral_constant<size_t,1>(), f, detail::placeholders::_1, std::forward<Args>(args)...);
// make a tuple of the shared args
auto shared_arg_tuple = detail::forward_shared_parameters_as_tuple(std::forward<Args>(args)...);
// package up the shared parameters for the executor
const size_t execution_depth = executor_execution_depth<Executor>::value;
// create a tuple of factories to use for shared parameters for the executor
auto factory_tuple = detail::make_shared_parameter_factory_tuple<execution_depth>(shared_arg_tuple);
// unpack shared parameters we receive from the executor
auto h = detail::make_unpack_shared_parameters_from_executor_and_invoke(g);
// compute the type of f's result
using result_of_f = result_of_t<Function(executor_index_t<Executor>,decay_parameter_t<Args>...)>;
// based on the type of f's result, make a factory that will create the appropriate type of container to store f's results
// (for a void-returning f this is a void_factory, which selects the void overload of bulk_invoke_executor_impl)
auto result_factory = detail::make_result_factory<result_of_f>(exec, shape);
return detail::bulk_invoke_executor_impl(exec, h, result_factory, shape, factory_tuple, detail::make_index_sequence<execution_depth>());
}
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/execution/executor/customization_points/bulk_sync_execute.hpp>
#include <agency/execution/executor/detail/utility/invoke_functors.hpp>
#include <agency/execution/executor/executor_traits.hpp>
#include <agency/detail/factory.hpp>
namespace agency
{
namespace detail
{
// executes f in bulk on exec, discarding all per-agent results
// one shared-parameter factory is required per level of the executor's
// execution hierarchy (enforced by the __AGENCY_REQUIRES constraint)
__agency_exec_check_disable__
template<class E, class Function, class... Factories,
__AGENCY_REQUIRES(BulkExecutor<E>()),
__AGENCY_REQUIRES(executor_execution_depth<E>::value == sizeof...(Factories))
>
__AGENCY_ANNOTATION
void bulk_sync_execute_with_void_result(E& exec, Function f, executor_shape_t<E> shape, Factories... factories)
{
// wrap f in a functor that will ignore the unit object we pass to it
ignore_unit_result_parameter_and_invoke<Function> g{f};
// just call bulk_sync_execute() and use a result factory that creates a unit object which can be easily discarded
agency::bulk_sync_execute(exec, g, shape, unit_factory(), factories...);
}
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/execution/executor/executor_traits.hpp>
#include <agency/execution/executor/customization_points/bulk_then_execute.hpp>
#include <agency/detail/invoke.hpp>
namespace agency
{
namespace detail
{
// this adaptor turns an Executor into a BulkContinuationExecutor
// XXX eliminate this when Agency drops support for legacy executors
template<class E, bool Enable = BulkExecutor<E>()>
class bulk_continuation_executor_adaptor;
// specialization for types satisfying BulkExecutor; the primary template is
// left undefined, so adapting a non-executor type is a compile-time error
template<class BulkExecutor>
class bulk_continuation_executor_adaptor<BulkExecutor,true>
{
private:
// the executor being adapted
BulkExecutor adapted_executor_;
public:
// forward the adapted executor's traits, defaulting to unsequenced execution
using execution_category = member_execution_category_or_t<BulkExecutor, unsequenced_execution_tag>;
using shape_type = executor_shape_t<BulkExecutor>;
using index_type = executor_index_t<BulkExecutor>;
template<class T>
using future = executor_future_t<BulkExecutor,T>;
template<class T>
using allocator = executor_allocator_t<BulkExecutor,T>;
__AGENCY_ANNOTATION
bulk_continuation_executor_adaptor() = default;
__AGENCY_ANNOTATION
bulk_continuation_executor_adaptor(const bulk_continuation_executor_adaptor&) = default;
// wraps an existing executor by copy
__AGENCY_ANNOTATION
bulk_continuation_executor_adaptor(const BulkExecutor& other)
: adapted_executor_(other)
{}
// schedules a bulk continuation on the adapted executor by lowering onto the
// agency::bulk_then_execute() customization point, which synthesizes the
// operation when the adapted executor does not provide it natively
template<class Function, class Future, class ResultFactory, class... SharedFactories>
__AGENCY_ANNOTATION
future<result_of_t<ResultFactory()>>
bulk_then_execute(Function f, shape_type shape, Future& predecessor, ResultFactory result_factory, SharedFactories... shared_factories)
{
return agency::bulk_then_execute(adapted_executor_, f, shape, predecessor, result_factory, shared_factories...);
}
};
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/utility.hpp>
#include <cstddef>
#include <tuple>
#include <utility>
namespace agency
{
namespace detail
{
// named_array_element<T,I> holds the Ith element of a named_array as a data
// member whose name encodes the index: 0 -> x, 1 -> y, 2 -> z, 3 -> w
// only indices 0 through 3 are specialized; the primary template is undefined
template<class T, size_t Index>
struct named_array_element;
template<class T>
struct named_array_element<T,0>
{
__AGENCY_ANNOTATION
named_array_element() = default;
__AGENCY_ANNOTATION
named_array_element(const named_array_element&) = default;
__AGENCY_ANNOTATION
explicit named_array_element(const T& value) : x(value) {}
T x;
};
template<class T>
struct named_array_element<T,1>
{
__AGENCY_ANNOTATION
named_array_element() = default;
__AGENCY_ANNOTATION
named_array_element(const named_array_element&) = default;
__AGENCY_ANNOTATION
explicit named_array_element(const T& value) : y(value) {}
T y;
};
template<class T>
struct named_array_element<T,2>
{
__AGENCY_ANNOTATION
named_array_element() = default;
__AGENCY_ANNOTATION
named_array_element(const named_array_element&) = default;
__AGENCY_ANNOTATION
explicit named_array_element(const T& value) : z(value) {}
T z;
};
template<class T>
struct named_array_element<T,3>
{
__AGENCY_ANNOTATION
named_array_element() = default;
__AGENCY_ANNOTATION
named_array_element(const named_array_element&) = default;
__AGENCY_ANNOTATION
explicit named_array_element(const T& value) : w(value) {}
T w;
};
// base of named_array: inherits one named_array_element<T,I> for each I in
// Indices..., giving the derived array one individually-named member per element
template<class T, class Indices>
struct named_array_base;
template<class T, size_t... Indices>
struct named_array_base<T, index_sequence<Indices...>> : named_array_element<T,Indices>...
{
__AGENCY_ANNOTATION
named_array_base() = default;
__AGENCY_ANNOTATION
named_array_base(const named_array_base&) = default;
// initializes element I from values.begin()[I]
// NOTE(review): no bounds check here — passing fewer than sizeof...(Indices)
// values reads past the end of the list; confirm callers always supply enough
__AGENCY_ANNOTATION
named_array_base(std::initializer_list<T> values)
: named_array_element<T,Indices>(values.begin()[Indices])...
{}
};
// a named_array is an array where each element has its own name, e.g. [x, y, z, w]
// it mirrors the std::array interface (element access, iterators, size queries,
// fill/swap) and additionally supports compile-time indexed access via get<I>()
template<class T, size_t N>
struct named_array : named_array_base<T,make_index_sequence<N>>
{
static_assert(0 < N && N < 5, "named_array's size must be at least one and less than five.");
using value_type = T;
using size_type = std::size_t;
using difference_type = std::ptrdiff_t;
using reference = value_type&;
using const_reference = const value_type&;
using pointer = value_type*;
using const_pointer = const value_type*;
using iterator = pointer;
using const_iterator = const_pointer;
__AGENCY_ANNOTATION
named_array() = default;
__AGENCY_ANNOTATION
named_array(const named_array&) = default;
// constructs the elements from a list of at least N values
__AGENCY_ANNOTATION
named_array(std::initializer_list<T> values)
: named_array_base<T,make_index_sequence<N>>(values)
{}
// unchecked element access; pos must be less than N
__AGENCY_ANNOTATION
reference operator[](size_type pos)
{
return begin()[pos];
}
__AGENCY_ANNOTATION
const_reference operator[](size_type pos) const
{
return begin()[pos];
}
// returns the first element (always valid because N > 0)
__AGENCY_ANNOTATION
reference front()
{
return *begin();
}
__AGENCY_ANNOTATION
const_reference front() const
{
return *begin();
}
// returns the last element
__AGENCY_ANNOTATION
reference back()
{
// return *rbegin();
return operator[](N-1);
}
__AGENCY_ANNOTATION
const_reference back() const
{
// return *rbegin();
return operator[](N-1);
}
// returns a pointer to the first element
// NOTE(review): iteration via data() assumes the named_array_element base
// subobjects are laid out contiguously in declaration order; common ABIs do
// this for such bases, but it is not guaranteed by the standard — confirm
// before porting to a new toolchain
__AGENCY_ANNOTATION
T* data()
{
return &this->x;
}
__AGENCY_ANNOTATION
const T* data() const
{
return &this->x;
}
__AGENCY_ANNOTATION
iterator begin()
{
return data();
}
__AGENCY_ANNOTATION
const_iterator begin() const
{
return data();
}
// const iterators, callable on both const and non-const objects
__AGENCY_ANNOTATION
const_iterator cbegin()
{
return begin();
}
__AGENCY_ANNOTATION
const_iterator cbegin() const
{
return begin();
}
__AGENCY_ANNOTATION
iterator end()
{
return data() + size();
}
__AGENCY_ANNOTATION
const_iterator end() const
{
return data() + size();
}
__AGENCY_ANNOTATION
const_iterator cend()
{
return end();
}
__AGENCY_ANNOTATION
const_iterator cend() const
{
return end();
}
// always false, since the static_assert above requires N > 0
__AGENCY_ANNOTATION
constexpr bool empty() const
{
return size() == 0;
}
__AGENCY_ANNOTATION
constexpr size_type size() const
{
return N;
}
__AGENCY_ANNOTATION
constexpr size_type max_size() const
{
return size();
}
// assigns value to every element
__AGENCY_ANNOTATION
void fill(const T& value)
{
for(auto& e : *this)
{
e = value;
}
}
// exchanges contents element-by-element with other
__AGENCY_ANNOTATION
void swap(named_array& other)
{
for(size_type i = 0; i < size(); ++i)
{
agency::detail::adl_swap((*this)[i], other[i]);
}
}
// compile-time indexed access; I is bounds-checked by the __AGENCY_REQUIRES constraint
template<std::size_t I, __AGENCY_REQUIRES(I < N)>
__AGENCY_ANNOTATION
T& get() &
{
return operator[](I);
}
template<std::size_t I, __AGENCY_REQUIRES(I < N)>
__AGENCY_ANNOTATION
const T& get() const &
{
return operator[](I);
}
// rvalue overload moves the element out
template<std::size_t I, __AGENCY_REQUIRES(I < N)>
__AGENCY_ANNOTATION
T&& get() &&
{
return std::move(operator[](I));
}
};
} // end detail
} // end agency
// specialize tuple-related functionality for agency::detail::named_array
namespace std
{
// a named_array of N elements behaves as an N-tuple
template<class T, std::size_t N>
struct tuple_size<agency::detail::named_array<T,N>> : std::integral_constant<std::size_t, N> {};
// every element of a named_array has the same type T
template<std::size_t I, class T, std::size_t N>
struct tuple_element<I, agency::detail::named_array<T,N>>
{
using type = T;
};
} // end std
<file_sep>#include <iostream>
#include <cassert>
#include <agency/execution/executor/customization_points/make_ready_future.hpp>
#include "../test_executors.hpp"
// exercises agency::make_ready_future() on the given executor for both a void
// future and a value-carrying future
template<class Executor>
void test(Executor exec)
{
  {
    // a ready void future must be valid and immediately waitable
    auto fut = agency::make_ready_future<void>(exec);
    assert(fut.valid());
    fut.wait();
  }

  {
    // a ready int future must be valid and yield the value it was seeded with
    auto fut = agency::make_ready_future<int>(exec, 13);
    assert(fut.valid());
    assert(fut.get() == 13);
  }
}
// runs the make_ready_future test against every kind of test executor,
// printing OK only if all assertions pass
int main()
{
test(bulk_synchronous_executor());
test(bulk_asynchronous_executor());
test(bulk_continuation_executor());
test(not_a_bulk_synchronous_executor());
test(not_a_bulk_asynchronous_executor());
test(not_a_bulk_continuation_executor());
test(complete_bulk_executor());
std::cout << "OK" << std::endl;
return 0;
}
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/execution/executor/executor_traits.hpp>
#include <agency/execution/executor/detail/utility/bulk_sync_execute_with_void_result.hpp>
#include <agency/execution/executor/detail/utility/bulk_sync_execute_with_collected_result.hpp>
#include <agency/execution/executor/detail/utility/executor_bulk_result.hpp>
#include <agency/detail/factory.hpp>
namespace agency
{
namespace detail
{
// this is the case for when Function returns void
// it simply discards the (nonexistent) results of the invocations
__agency_exec_check_disable__
template<class E, class Function, class... Factories,
__AGENCY_REQUIRES(BulkExecutor<E>()),
__AGENCY_REQUIRES(executor_execution_depth<E>::value == sizeof...(Factories)),
__AGENCY_REQUIRES(std::is_void<result_of_t<Function(executor_index_t<E>, result_of_t<Factories()>&...)>>::value)
>
__AGENCY_ANNOTATION
void bulk_sync_execute_with_auto_result(E& exec, Function f, executor_shape_t<E> shape, Factories... factories)
{
return detail::bulk_sync_execute_with_void_result(exec, f, shape, factories...);
}
// this is the case for when Function returns non-void
// in this case, this function collects
// the results of each invocation into a container
// this container is returned through a future
template<class E, class Function, class... Factories,
__AGENCY_REQUIRES(BulkExecutor<E>()),
__AGENCY_REQUIRES(executor_execution_depth<E>::value == sizeof...(Factories)),
__AGENCY_REQUIRES(!std::is_void<result_of_t<Function(executor_index_t<E>, result_of_t<Factories()>&...)>>::value)
>
__AGENCY_ANNOTATION
executor_bulk_result_t<
E,
result_of_t<Function(executor_index_t<E>,result_of_t<Factories()>&...)>
>
bulk_sync_execute_with_auto_result(E& exec, Function f, executor_shape_t<E> shape, Factories... factories)
{
// compute the type of f's result
using result_type = result_of_t<Function(executor_index_t<E>,result_of_t<Factories()>&...)>;
// compute the type of container that will store f's results
using container_type = executor_bulk_result_t<E,result_type>;
// create a factory that will construct this type of container for us
// the container is sized by the executor's shape, one slot per agent
auto result_factory = detail::make_construct<container_type>(shape);
// lower onto bulk_sync_execute_with_collected_result() with this result_factory
return detail::bulk_sync_execute_with_collected_result(exec, f, shape, result_factory, factories...);
}
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/type_traits.hpp>
#include <agency/detail/control_structures/executor_functions/bulk_async_executor.hpp>
#include <agency/detail/control_structures/execute_agent_functor.hpp>
#include <agency/detail/control_structures/single_result.hpp>
#include <agency/detail/control_structures/bulk_invoke_execution_policy.hpp>
#include <agency/detail/control_structures/shared_parameter.hpp>
#include <agency/detail/control_structures/tuple_of_agent_shared_parameter_factories.hpp>
#include <agency/execution/execution_agent.hpp>
#include <agency/execution/executor/executor_traits/executor_shape.hpp>
#include <agency/execution/executor/executor_traits/detail/executor_barrier_types_as_scoped_in_place_type.hpp>
#include <agency/execution/execution_policy.hpp>
#include <agency/tuple.hpp>
#include <utility>
namespace agency
{
namespace detail
{
// metafunction computing the result type of bulk_async() invoked through an
// ExecutionPolicy: the policy's future template wrapped around the type that
// the corresponding bulk_invoke() call would return
template<class ExecutionPolicy, class Function, class... Args>
struct bulk_async_execution_policy_result
{
using type = execution_policy_future_t<
ExecutionPolicy,
bulk_invoke_execution_policy_result_t<ExecutionPolicy,Function,Args...>
>;
};
// convenience alias for the nested ::type above
template<class ExecutionPolicy, class Function, class... Args>
using bulk_async_execution_policy_result_t = typename bulk_async_execution_policy_result<ExecutionPolicy,Function,Args...>::type;
// bulk_async_execution_policy() implements the bulk_async() control structure
// for execution policies: it asynchronously creates the group of execution
// agents described by policy and invokes f once per agent
//
// UserArgIndices...:   positions of the user's trailing arguments within args...
// SharedArgIndices...: hierarchy levels that receive agent shared parameters
// returns:             a future containing the collected results of f
template<size_t... UserArgIndices, size_t... SharedArgIndices, class ExecutionPolicy, class Function, class... Args>
__AGENCY_ANNOTATION
bulk_async_execution_policy_result_t<
ExecutionPolicy, Function, Args...
>
bulk_async_execution_policy(index_sequence<UserArgIndices...>,
index_sequence<SharedArgIndices...>,
ExecutionPolicy& policy, Function f, Args&&... args)
{
using agent_type = typename ExecutionPolicy::execution_agent_type;
using agent_traits = execution_agent_traits<agent_type>;
// get the parameters of the agent
auto param = policy.param();
auto agent_shape = agent_traits::domain(param).shape();
using executor_type = typename ExecutionPolicy::executor_type;
// get a list of barrier types to create as a scoped_in_place_type_t
executor_barrier_types_as_scoped_in_place_type_t<executor_type> barriers;
// this is a tuple of factories
// each factory in the tuple creates the execution agent's shared parameter at the corresponding hierarchy level
auto tuple_of_factories = detail::make_tuple_of_agent_shared_parameter_factories<agent_type>(param, barriers);
// convert the shape of the agent into the type of the executor's shape
using executor_shape_type = executor_shape_t<executor_type>;
executor_shape_type executor_shape = detail::shape_cast<executor_shape_type>(agent_shape);
// create the function that will marshal parameters received from bulk_invoke(executor) and execute the agent
auto lambda = execute_agent_functor<executor_type,agent_traits,Function,UserArgIndices...>{param, agent_shape, executor_shape, f};
// lower onto the executor-level bulk_async, attaching one shared-parameter
// factory per requested hierarchy level after the user's own arguments
return detail::bulk_async_executor(
policy.executor(),
executor_shape,
lambda,
std::forward<Args>(args)...,
agency::share_at_scope_from_factory<SharedArgIndices>(agency::get<SharedArgIndices>(tuple_of_factories))...
);
}
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/control_structures/executor_functions/shared_parameter_packaging.hpp>
#include <agency/tuple.hpp>
namespace agency
{
namespace detail
{
// this is the functor that bulk functions like bulk_invoke
// execute via an executor to unpack potentially many shared parameters
// from the packaging used with the executor and the invoke the given function
// with the execution agent index and unpacked shared parameters as arguments
template<class Function>
struct unpack_shared_parameters_from_executor_and_invoke
{
// mutable so a non-const callable may be invoked through our const operator()
mutable Function g;
// note: the trailing decltype must mirror the body's computation exactly so
// that SFINAE and the deduced return type stay in agreement
template<class Index, class... Types>
__AGENCY_ANNOTATION
auto operator()(const Index& idx, Types&... packaged_shared_params) const
-> decltype(
__tu::tuple_apply(
g,
__tu::tuple_prepend_invoke(
agency::detail::unpack_shared_parameters_from_executor(packaged_shared_params...),
idx,
agency::detail::forwarder{})
)
)
{
auto shared_params = agency::detail::unpack_shared_parameters_from_executor(packaged_shared_params...);
// XXX the following is the moral equivalent of:
// g(idx, shared_params...);
// create one big tuple of the arguments so we can just call tuple_apply
auto idx_and_shared_params = __tu::tuple_prepend_invoke(shared_params, idx, agency::detail::forwarder{});
return __tu::tuple_apply(g, idx_and_shared_params);
}
};
// factory function deducing Function for unpack_shared_parameters_from_executor_and_invoke
template<class Function>
__AGENCY_ANNOTATION
unpack_shared_parameters_from_executor_and_invoke<Function> make_unpack_shared_parameters_from_executor_and_invoke(Function f)
{
  // list-initialize the adaptor with the callable to wrap
  return {f};
}
} // end detail
} // end agency
<file_sep>#include <agency/execution/executor/executor_traits.hpp>
#include <type_traits>
#include <iostream>
#include "../test_executors.hpp"
int main()
{
using namespace agency;
// test not_an_executor
static_assert(!is_bulk_executor<not_an_executor>::value, "not_an_executor is not supposed to be a bulk executor");
static_assert(!is_bulk_synchronous_executor<not_an_executor>::value, "not_an_executor is not supposed to be a bulk synchronous executor");
static_assert(!is_bulk_asynchronous_executor<not_an_executor>::value, "not_an_executor is not supposed to be a bulk asynchronous executor");
static_assert(!is_bulk_continuation_executor<not_an_executor>::value, "not_an_executor is not supposed to be a bulk continuation executor");
// test bulk_synchronous_executor
static_assert(is_bulk_executor<bulk_synchronous_executor>::value, "bulk_synchronous_executor is supposed to be a bulk executor");
static_assert(is_bulk_synchronous_executor<bulk_synchronous_executor>::value, "bulk_synchronous_executor is supposed to be a bulk synchronous executor");
static_assert(!is_bulk_asynchronous_executor<bulk_synchronous_executor>::value, "bulk_synchronous_executor is not supposed to be a bulk asynchronous executor");
static_assert(!is_bulk_continuation_executor<bulk_synchronous_executor>::value, "bulk_synchronous_executor is not supposed to be a bulk continuation executor");
// test bulk_asynchronous_executor
static_assert(is_bulk_executor<bulk_asynchronous_executor>::value, "bulk_asynchronous_executor is supposed to be a bulk executor");
static_assert(!is_bulk_synchronous_executor<bulk_asynchronous_executor>::value, "bulk_asynchronous_executor is not supposed to be a bulk synchronous executor");
static_assert(is_bulk_asynchronous_executor<bulk_asynchronous_executor>::value, "bulk_asynchronous_executor is supposed to be a bulk asynchronous executor");
static_assert(!is_bulk_continuation_executor<bulk_asynchronous_executor>::value, "bulk_asynchronous_executor is not supposed to be a bulk continuation executor");
// test bulk_continuation_executor
static_assert(is_bulk_executor<bulk_continuation_executor>::value, "bulk_continuation_executor is supposed to be a bulk executor");
static_assert(!is_bulk_synchronous_executor<bulk_continuation_executor>::value, "bulk_continuation_executor is not supposed to be a bulk synchronous executor");
static_assert(!is_bulk_asynchronous_executor<bulk_continuation_executor>::value, "bulk_continuation_executor is not supposed to be a bulk asynchronous executor");
static_assert(is_bulk_continuation_executor<bulk_continuation_executor>::value, "bulk_continuation_executor is supposed to be a bulk continuation executor");
// test not_a_bulk_synchronous_executor
static_assert(is_bulk_executor<not_a_bulk_synchronous_executor>::value, "not_a_bulk_synchronous_executor is supposed to be a bulk executor");
static_assert(!is_bulk_synchronous_executor<not_a_bulk_synchronous_executor>::value, "not_a_bulk_synchronous_executor is not supposed to be a bulk synchronous executor");
static_assert(is_bulk_asynchronous_executor<not_a_bulk_synchronous_executor>::value, "not_a_bulk_synchronous_executor is supposed to be a bulk asynchronous executor");
static_assert(is_bulk_continuation_executor<not_a_bulk_synchronous_executor>::value, "not_a_bulk_synchronous_executor is supposed to be a bulk continuation executor");
// test not_a_bulk_asynchronous_executor
static_assert(is_bulk_executor<not_a_bulk_asynchronous_executor>::value, "not_a_bulk_asynchronous_executor is supposed to be a bulk executor");
static_assert(is_bulk_synchronous_executor<not_a_bulk_asynchronous_executor>::value, "not_a_bulk_asynchronous_executor is supposed to be a bulk synchronous executor");
static_assert(!is_bulk_asynchronous_executor<not_a_bulk_asynchronous_executor>::value, "not_a_bulk_asynchronous_executor is not supposed to be a bulk asynchronous executor");
static_assert(is_bulk_continuation_executor<not_a_bulk_asynchronous_executor>::value, "not_a_bulk_asynchronous_executor is supposed to be a bulk continuation executor");
// test not_a_bulk_continuation_executor
static_assert(is_bulk_executor<not_a_bulk_continuation_executor>::value, "not_a_bulk_continuation_executor is supposed to be a bulk executor");
static_assert(is_bulk_synchronous_executor<not_a_bulk_continuation_executor>::value, "not_a_bulk_continuation_executor is supposed to be a bulk synchronous executor");
static_assert(is_bulk_asynchronous_executor<not_a_bulk_continuation_executor>::value, "not_a_bulk_continuation_executor is not supposed to be a bulk asynchronous executor");
static_assert(!is_bulk_continuation_executor<not_a_bulk_continuation_executor>::value, "not_a_bulk_continuation_executor is supposed to be a bulk continuation executor");
// test complete_bulk_executor
static_assert(is_bulk_executor<complete_bulk_executor>::value, "complete_bulk_executor is supposed to be a bulk executor");
static_assert(is_bulk_synchronous_executor<complete_bulk_executor>::value, "complete_bulk_executor is supposed to be a bulk synchronous executor");
static_assert(is_bulk_asynchronous_executor<complete_bulk_executor>::value, "complete_bulk_executor is supposed to be a bulk asynchronous executor");
static_assert(is_bulk_continuation_executor<complete_bulk_executor>::value, "complete_bulk_executor is supposed to be a bulk continuation executor");
std::cout << "OK" << std::endl;
return 0;
}
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/execution/execution_categories.hpp>
#include <agency/execution/executor/parallel_executor.hpp>
#include <agency/execution/executor/vector_executor.hpp>
#include <agency/execution/executor/scoped_executor.hpp>
#include <agency/execution/executor/flattened_executor.hpp>
#include <agency/detail/concurrency/latch.hpp>
#include <agency/detail/concurrency/concurrent_queue.hpp>
#include <agency/detail/unique_function.hpp>
#include <agency/future.hpp>
#include <agency/detail/type_traits.hpp>
#include <thread>
#include <vector>
#include <algorithm>
#include <memory>
#include <future>
namespace agency
{
namespace detail
{
// A simple fixed-size thread pool: a set of worker threads drains a single
// shared concurrent queue of type-erased tasks.
class thread_pool
{
private:
// std::thread that joins itself on destruction (if still joinable), so
// destroying threads_ blocks until every worker has exited its loop
struct joining_thread : std::thread
{
using std::thread::thread;
joining_thread(joining_thread&&) = default;
~joining_thread()
{
if(joinable()) join();
}
};
public:
// Spawns num_threads workers; defaults to hardware_concurrency(),
// but always at least one thread.
explicit thread_pool(size_t num_threads = std::max(1u, std::thread::hardware_concurrency()))
{
for(size_t i = 0; i < num_threads; ++i)
{
threads_.emplace_back([this]
{
work();
});
}
}
// Closing the queue makes each worker's wait_and_pop() return false so the
// workers exit; clearing threads_ then joins them via ~joining_thread.
~thread_pool()
{
tasks_.close();
threads_.clear();
}
// Enqueues f() for execution by some worker thread. The defaulted second
// template parameter SFINAEs this out unless f is callable with no arguments.
template<class Function,
class = result_of_t<Function()>>
inline void submit(Function&& f)
{
auto is_this_thread = [=](const joining_thread& t)
{
return t.get_id() == std::this_thread::get_id();
};
// guard against self-submission which may result in deadlock
// XXX it might be faster to compare this to a thread_local variable
if(std::find_if(threads_.begin(), threads_.end(), is_this_thread) == threads_.end())
{
tasks_.emplace(std::forward<Function>(f));
}
else
{
// the submitting thread is part of this pool so execute immediately
std::forward<Function>(f)();
}
}
// Returns the number of worker threads in the pool.
inline size_t size() const
{
return threads_.size();
}
// Submits f(args...) to the pool and returns a std::future for its result.
template<class Function, class... Args>
std::future<result_of_t<Function(Args...)>>
async(Function&& f, Args&&... args)
{
// bind f & args together
auto g = std::bind(std::forward<Function>(f), std::forward<Args>(args)...);
using result_type = result_of_t<Function(Args...)>;
// create a packaged task
std::packaged_task<result_type()> task(std::move(g));
// get the packaged task's future so we can return it at the end
auto result_future = task.get_future();
// move the packaged task into the thread pool
submit(std::move(task));
return std::move(result_future);
}
private:
// Worker loop: pop and run tasks until the queue is closed and drained.
inline void work()
{
unique_function<void()> task;
while(tasks_.wait_and_pop(task))
{
task();
}
}
// shared task queue; close() initiates pool shutdown
agency::detail::concurrent_queue<unique_function<void()>> tasks_;
std::vector<joining_thread> threads_;
};
// Returns the process-wide thread pool shared by all thread_pool_executors.
// The pool is constructed lazily on first use (Meyers singleton) and torn
// down automatically at program exit.
inline thread_pool& system_thread_pool()
{
  static thread_pool pool;
  return pool;
}
// An executor that runs bulk work on the process-wide system_thread_pool().
// Provides synchronous (bulk_sync_execute) and continuation-style
// (bulk_then_execute) bulk execution with parallel execution semantics.
class thread_pool_executor
{
public:
using execution_category = parallel_execution_tag;
// Runs f(idx, result, shared_arg) for idx in [0, n) and blocks until all
// invocations complete; returns the object created by result_factory.
template<class Function, class ResultFactory, class SharedFactory>
result_of_t<ResultFactory()>
bulk_sync_execute(Function f, size_t n, ResultFactory result_factory, SharedFactory shared_factory)
{
auto result = result_factory();
auto shared_arg = shared_factory();
// XXX we might prefer to unconditionally execute task 0 inline
if(n <= 1)
{
// n == 0 is a no-op; n == 1 runs inline on the calling thread
if(n == 1) f(0, result, shared_arg);
}
else
{
// the latch counts outstanding tasks; each task counts down exactly once
agency::detail::latch work_remaining(n);
for(size_t idx = 0; idx < n; ++idx)
{
system_thread_pool().submit([=,&result,&shared_arg,&work_remaining] () mutable
{
f(idx, result, shared_arg);
work_remaining.count_down(1);
});
}
// wait for all the work to complete
work_remaining.wait();
}
return std::move(result);
}
private:
// this deleter fulfills a promise just before
// it deletes its argument
template<class ResultType>
struct fulfill_promise_and_delete
{
std::shared_ptr<std::promise<ResultType>> shared_promise_ptr;
void operator()(ResultType* ptr_to_result)
{
// move the result object into the promise
shared_promise_ptr->set_value(std::move(*ptr_to_result));
// delete the pointer
delete ptr_to_result;
}
};
public:
// this is the overload of bulk_then_execute for non-void Future
// Submits n tasks which wait on the predecessor, invoke
// f(idx, predecessor_value, result, shared_arg), and fulfill the returned
// future once the last task drops its reference to the shared result state.
template<class Function, class Future, class ResultFactory, class SharedFactory,
__AGENCY_REQUIRES(!std::is_void<future_value_t<Future>>::value)
>
std::future<
result_of_t<ResultFactory()>
>
bulk_then_execute(Function f, size_t n, Future& predecessor, ResultFactory result_factory, SharedFactory shared_factory)
{
using result_type = result_of_t<ResultFactory()>;
// create a shared promise to fulfill the result
auto shared_promise_ptr = std::make_shared<std::promise<result_type>>();
// get the shared promise's future
auto result_future = shared_promise_ptr->get_future();
// create a deleter which fulfills the promise with the result and then deletes the result
fulfill_promise_and_delete<result_type> deleter{std::move(shared_promise_ptr)};
// create the shared state for the result
// note that we use our special deleter with this state
auto shared_result_ptr = std::shared_ptr<result_type>(new result_type(result_factory()), std::move(deleter));
// create the shared state for the shared parameter
using shared_arg_type = result_of_t<SharedFactory()>;
auto shared_arg_ptr = std::make_shared<shared_arg_type>(shared_factory());
// share the incoming future
auto shared_predecessor = future_traits<Future>::share(predecessor);
// submit n tasks to the thread pool
for(size_t idx = 0; idx < n; ++idx)
{
system_thread_pool().submit([=]() mutable
{
// nvcc makes this lambda's constructors __host__ __device__ when
// any of its captures' constructors are __host__ __device__. This causes nvcc
// to emit warnings about a __host__ __device__ function calling __host__ functions
// this #ifndef works around this problem
#ifndef __CUDA_ARCH__
// get the predecessor future's value
using predecessor_type = future_value_t<Future>;
predecessor_type& predecessor_arg = const_cast<predecessor_type&>(shared_predecessor.get());
// call the user's function
f(idx, predecessor_arg, *shared_result_ptr, *shared_arg_ptr);
// we explicitly release shared_result_ptr because even though this
// lambda's invocation is complete, the lambda's lifetime
// (and therefore shared_result_ptr's lifetime) is not necessarily complete
// this .reset() is what fulfills the promise via shared_result_ptr's deleter
shared_result_ptr.reset();
#endif
});
}
// return the result future
return std::move(result_future);
}
// this is the overload of bulk_then_execute for void Future
// Identical to the overload above except the user function is invoked
// without a predecessor argument: f(idx, result, shared_arg).
template<class Function, class Future, class ResultFactory, class SharedFactory,
__AGENCY_REQUIRES(std::is_void<future_value_t<Future>>::value)
>
std::future<
result_of_t<ResultFactory()>
>
bulk_then_execute(Function f, size_t n, Future& predecessor, ResultFactory result_factory, SharedFactory shared_factory)
{
using result_type = result_of_t<ResultFactory()>;
// create a shared promise to fulfill the result
auto shared_promise_ptr = std::make_shared<std::promise<result_type>>();
// get the shared promise's future
auto result_future = shared_promise_ptr->get_future();
// create a deleter which fulfills the promise with the result and then deletes the result
fulfill_promise_and_delete<result_type> deleter{std::move(shared_promise_ptr)};
// create the shared state for the result
auto shared_result_ptr = std::shared_ptr<result_type>(new result_type(result_factory()), std::move(deleter));
// create the shared state for the shared parameter
using shared_arg_type = result_of_t<SharedFactory()>;
auto shared_arg_ptr = std::make_shared<shared_arg_type>(shared_factory());
// share the incoming future
auto shared_predecessor = future_traits<Future>::share(predecessor);
// submit n tasks to the thread pool
for(size_t idx = 0; idx < n; ++idx)
{
system_thread_pool().submit([=]() mutable
{
// nvcc makes this lambda's constructors __host__ __device__ when
// any of its captures' constructors are __host__ __device__. This causes nvcc
// to emit warnings about a __host__ __device__ function calling __host__ functions
// this #ifndef works around this problem
#ifndef __CUDA_ARCH__
// wait on the predecessor future
shared_predecessor.wait();
// call the user's function
f(idx, *shared_result_ptr, *shared_arg_ptr);
// we explicitly release shared_result_ptr because even though this
// lambda's invocation is complete, the lambda's lifetime
// (and therefore shared_result_ptr's lifetime) is not necessarily complete
// this .reset() is what fulfills the promise via shared_result_ptr's deleter
shared_result_ptr.reset();
#endif
});
}
// return the result future
return std::move(result_future);
}
// Returns the number of worker threads in the underlying pool.
size_t unit_shape() const
{
return system_thread_pool().size();
}
};
// compose thread_pool_executor with other fancy executors
// to yield a parallel_thread_pool_executor:
// the scoped executor runs thread_pool_executor across pool threads and
// this_thread::parallel_executor within each thread; flattened_executor
// collapses the two levels into a single-level parallel executor
using parallel_thread_pool_executor = agency::flattened_executor<
agency::scoped_executor<
thread_pool_executor,
agency::this_thread::parallel_executor
>
>;
// compose thread_pool_executor with other fancy executors
// to yield a parallel_vector_thread_pool_executor
// (same composition, but with a vector_executor at the inner level)
using parallel_vector_thread_pool_executor = agency::flattened_executor<
agency::scoped_executor<
thread_pool_executor,
agency::this_thread::vector_executor
>
>;
} // end detail
} // end agency
<file_sep>#include <agency/agency.hpp>
#include <agency/execution/executor/detail/utility.hpp>
#include <iostream>
#include "../test_executors.hpp"
// Verifies bulk_then_execute_with_auto_result with a void predecessor future
// and a void-returning function: each of the `shape` agents increments both
// the local counter and the shared parameter exactly once.
template<class Executor>
void test_with_void_predecessor_returning_void(Executor exec)
{
agency::executor_shape_t<Executor> shape{100};
auto predecessor_future = agency::make_ready_future<void>(exec);
size_t shared_arg = 0;
size_t increment_me = 0;
std::mutex mut;
auto fut = agency::detail::bulk_then_execute_with_auto_result(exec, [&](size_t, size_t& shared_arg)
{
// RAII locking releases the mutex on every exit path
std::lock_guard<std::mutex> lock(mut);
increment_me += 1;
++shared_arg;
},
shape,
predecessor_future,
[&]
{
return std::ref(shared_arg);
});
fut.wait();
assert(increment_me == shape);
assert(shared_arg == shape);
}
// Verifies bulk_then_execute_with_auto_result with a void predecessor and a
// result-returning function: each agent returns its element of the shared
// vector (all 13), so the collected result equals a container of 13s.
template<class Executor>
void test_with_void_predecessor_returning_results(Executor exec)
{
auto predecessor_future = agency::detail::make_ready_future();
using index_type = agency::executor_index_t<Executor>;
size_t shape = 10;
auto f = agency::detail::bulk_then_execute_with_auto_result(exec,
[](index_type idx, std::vector<int>& shared_arg)
{
return shared_arg[idx];
},
shape,
predecessor_future,
[=]{ return std::vector<int>(shape, 13); } // shared_arg
);
auto result = f.get();
using container_type = agency::vector<int, agency::executor_allocator_t<Executor,int>>;
assert(container_type(shape, 13) == result);
}
// Verifies bulk_then_execute_with_auto_result with an int predecessor (13)
// and a void-returning function: each of the `shape` agents adds the
// predecessor value to one counter and increments the shared parameter once.
template<class Executor>
void test_with_non_void_predecessor_returning_void(Executor exec)
{
agency::executor_shape_t<Executor> shape{100};
auto predecessor_future = agency::make_ready_future<int>(exec, 13);
size_t shared_arg = 0;
size_t increment_me = 0;
std::mutex mut;
auto fut = agency::detail::bulk_then_execute_with_auto_result(exec, [&](size_t, int& predecessor, size_t& shared_arg)
{
// RAII locking releases the mutex on every exit path
std::lock_guard<std::mutex> lock(mut);
increment_me += predecessor;
++shared_arg;
},
shape,
predecessor_future,
[&]
{
return std::ref(shared_arg);
});
fut.wait();
assert(increment_me == shape * 13);
assert(shared_arg == shape);
}
// Verifies bulk_then_execute_with_auto_result with an int predecessor (7) and
// a result-returning function: each agent returns predecessor + its shared
// element (13), so the collected result equals a container of 20s.
template<class Executor>
void test_with_non_void_predecessor_returning_results(Executor exec)
{
auto predecessor_future = agency::make_ready_future<int>(exec, 7);
using index_type = agency::executor_index_t<Executor>;
size_t shape = 10;
auto f = agency::detail::bulk_then_execute_with_auto_result(exec,
[](index_type idx, int& predecessor, std::vector<int>& shared_arg)
{
return predecessor + shared_arg[idx];
},
shape,
predecessor_future,
[=]{ return std::vector<int>(shape, 13); } // shared_arg
);
auto result = f.get();
using container_type = agency::vector<int, agency::executor_allocator_t<Executor,int>>;
assert(container_type(shape, 7 + 13) == result);
}
// Exercises bulk_then_execute_with_auto_result against every test executor,
// covering all four combinations of {void, non-void} predecessor and
// {void, result-returning} user function.
int main()
{
test_with_void_predecessor_returning_void(bulk_synchronous_executor());
test_with_void_predecessor_returning_void(bulk_asynchronous_executor());
test_with_void_predecessor_returning_void(bulk_continuation_executor());
test_with_void_predecessor_returning_void(not_a_bulk_synchronous_executor());
test_with_void_predecessor_returning_void(not_a_bulk_asynchronous_executor());
test_with_void_predecessor_returning_void(not_a_bulk_continuation_executor());
test_with_void_predecessor_returning_void(complete_bulk_executor());
test_with_void_predecessor_returning_results(bulk_synchronous_executor());
test_with_void_predecessor_returning_results(bulk_asynchronous_executor());
test_with_void_predecessor_returning_results(bulk_continuation_executor());
test_with_void_predecessor_returning_results(not_a_bulk_synchronous_executor());
test_with_void_predecessor_returning_results(not_a_bulk_asynchronous_executor());
test_with_void_predecessor_returning_results(not_a_bulk_continuation_executor());
test_with_void_predecessor_returning_results(complete_bulk_executor());
test_with_non_void_predecessor_returning_void(bulk_synchronous_executor());
test_with_non_void_predecessor_returning_void(bulk_asynchronous_executor());
test_with_non_void_predecessor_returning_void(bulk_continuation_executor());
test_with_non_void_predecessor_returning_void(not_a_bulk_synchronous_executor());
test_with_non_void_predecessor_returning_void(not_a_bulk_asynchronous_executor());
test_with_non_void_predecessor_returning_void(not_a_bulk_continuation_executor());
test_with_non_void_predecessor_returning_void(complete_bulk_executor());
test_with_non_void_predecessor_returning_results(bulk_synchronous_executor());
test_with_non_void_predecessor_returning_results(bulk_asynchronous_executor());
test_with_non_void_predecessor_returning_results(bulk_continuation_executor());
test_with_non_void_predecessor_returning_results(not_a_bulk_synchronous_executor());
test_with_non_void_predecessor_returning_results(not_a_bulk_asynchronous_executor());
test_with_non_void_predecessor_returning_results(not_a_bulk_continuation_executor());
test_with_non_void_predecessor_returning_results(complete_bulk_executor());
std::cout << "OK" << std::endl;
return 0;
}
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/execution/executor/executor_traits.hpp>
#include <agency/detail/shape_cast.hpp>
#include <type_traits>
namespace agency
{
namespace detail
{
// SFINAE probe: detects whether Executor has a member function .unit_shape()
// whose return type is exactly Shape.
template<class Executor, class Shape>
struct has_unit_shape_impl
{
template<
class Executor1,
class ReturnType = decltype(std::declval<Executor1>().unit_shape()),
__AGENCY_REQUIRES(
std::is_same<ReturnType,Shape>::value
)
>
static std::true_type test(int);
template<class>
static std::false_type test(...);
// std::true_type iff Executor::unit_shape() exists and returns Shape
using type = decltype(test<Executor>(0));
};
// convenience alias for the probe's result type
template<class Executor, class Shape>
using has_unit_shape = typename has_unit_shape_impl<Executor,Shape>::type;
} // end detail
// this overload handles the case when an Executor has .unit_shape()
__agency_exec_check_disable__
template<class E,
__AGENCY_REQUIRES(detail::Executor<E>()),
__AGENCY_REQUIRES(detail::has_unit_shape<E,executor_shape_t<E>>::value)
>
__AGENCY_ANNOTATION
executor_shape_t<E> unit_shape(const E& exec)
{
// forward to the executor's own notion of its unit shape
return exec.unit_shape();
}
// this overload handles the case when an Executor does not have .unit_shape()
__agency_exec_check_disable__
template<class E,
__AGENCY_REQUIRES(detail::Executor<E>()),
__AGENCY_REQUIRES(!detail::has_unit_shape<E,executor_shape_t<E>>::value)
>
__AGENCY_ANNOTATION
executor_shape_t<E> unit_shape(const E&)
{
// by default, an executor's unit shape contains a single point
return detail::shape_cast<executor_shape_t<E>>(1);
}
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/experimental/ranges/range_traits.hpp>
#include <agency/tuple.hpp>
#include <agency/detail/tuple/tuple_utility.hpp>
#include <type_traits>
#include <iterator>
namespace agency
{
namespace experimental
{
namespace detail
{
// XXX TODO: for completeness, the no iterators case
// XXX might wish to derive from Function to get the empty base class optimization
// Iterator that advances a tuple of iterators in lockstep; dereferencing it
// applies Function to the dereferenced elements and yields the call's result
// by value. All positioning (comparison, difference) is based solely on the
// first iterator in the tuple.
template<class Function, class Iterator, class... Iterators>
class zip_with_iterator
{
private:
using iterator_tuple_type = agency::tuple<Iterator, Iterators...>;
// private delegate constructor taking the already-built tuple
__AGENCY_ANNOTATION
zip_with_iterator(Function f, const iterator_tuple_type& iterator_tuple)
: f_(f),
iterator_tuple_(iterator_tuple)
{}
__AGENCY_ANNOTATION
const iterator_tuple_type& iterator_tuple() const
{
return iterator_tuple_;
}
public:
// the value produced by invoking Function on the constituent references
using value_type = agency::detail::result_of_t<
Function(
typename std::iterator_traits<Iterator>::reference,
typename std::iterator_traits<Iterators>::reference...
)
>;
// dereference yields a prvalue, so reference is the value type itself
using reference = value_type;
// XXX this should actually be some sort of proxy pointer
using pointer = void;
using difference_type = typename std::iterator_traits<
typename std::tuple_element<0,iterator_tuple_type>::type
>::difference_type;
// category is taken from the first iterator only
using iterator_category = typename std::iterator_traits<Iterator>::iterator_category;
__AGENCY_ANNOTATION
zip_with_iterator(Function f, Iterator iter, Iterators... iters)
: zip_with_iterator(f, agency::make_tuple(iter, iters...))
{}
// the first constituent iterator; used for all comparisons and arithmetic
__AGENCY_ANNOTATION
Iterator first_iterator() const
{
return agency::get<0>(iterator_tuple());
}
private:
struct increment_functor
{
template<class OtherIterator>
__AGENCY_ANNOTATION
void operator()(OtherIterator& iter)
{
++iter;
}
};
public:
// pre-increment: advances every constituent iterator by one
__AGENCY_ANNOTATION
void operator++()
{
__tu::tuple_for_each(increment_functor(), iterator_tuple_);
}
private:
struct dereference_functor
{
template<class OtherIterator>
__AGENCY_ANNOTATION
typename std::iterator_traits<OtherIterator>::reference
operator()(OtherIterator iter)
{
return *iter;
}
};
public:
__AGENCY_ANNOTATION
reference operator*() const
{
// this calls dereference_functor() on each element of iterator_tuple_,
// and passes the results of these dereferences to an invocation of f_()
return __tu::tuple_map_with_make(dereference_functor(), f_, iterator_tuple_);
}
private:
struct add_assign_functor
{
difference_type rhs;
template<class OtherIterator>
__AGENCY_ANNOTATION
void operator()(OtherIterator& iter) const
{
iter += rhs;
}
};
public:
// advances every constituent iterator by n
__AGENCY_ANNOTATION
zip_with_iterator& operator+=(difference_type n)
{
__tu::tuple_for_each(add_assign_functor{n}, iterator_tuple_);
return *this;
}
__AGENCY_ANNOTATION
zip_with_iterator operator+(difference_type n) const
{
zip_with_iterator result = *this;
result += n;
return result;
}
__AGENCY_ANNOTATION
reference operator[](difference_type i) const
{
auto tmp = *this;
tmp += i;
return *tmp;
}
// these operators are implemented as friend functions to allow interoperation with zip_with_iterators whose
// constituent iterator types are related to ours
// they're implemented with return type deduction to enable/disable them with SFINAE
// NOTE(review): in these friends the first deduced parameter (named
// OtherIterator) actually binds to the other iterator's Function slot;
// this appears harmless because only first_iterator() is compared, but the
// parameter names are misleading — confirm before renaming.
// equal with other zip_with_iterator
template<class OtherIterator, class... OtherIterators>
__AGENCY_ANNOTATION
friend auto operator==(const zip_with_iterator& lhs, const zip_with_iterator<OtherIterator,OtherIterators...>& rhs) ->
decltype(std::declval<Iterator>() == rhs.first_iterator())
{
return lhs.first_iterator() == rhs.first_iterator();
}
// not equal with other zip_with_iterator
template<class OtherIterator, class... OtherIterators>
__AGENCY_ANNOTATION
friend auto operator!=(const zip_with_iterator& lhs, const zip_with_iterator<OtherIterator,OtherIterators...>& rhs) ->
decltype(std::declval<Iterator>() != rhs.first_iterator())
{
return lhs.first_iterator() != rhs.first_iterator();
}
// less than other zip_with_iterator
template<class OtherIterator, class... OtherIterators>
__AGENCY_ANNOTATION
friend auto operator<(const zip_with_iterator& lhs, const zip_with_iterator<OtherIterator,OtherIterators...>& rhs) ->
decltype(std::declval<Iterator>() < rhs.first_iterator())
{
return lhs.first_iterator() < rhs.first_iterator();
}
// less than equal with other zip_with_iterator
template<class OtherIterator, class... OtherIterators>
__AGENCY_ANNOTATION
friend auto operator<=(const zip_with_iterator& lhs, const zip_with_iterator<OtherIterator,OtherIterators...>& rhs) ->
decltype(std::declval<Iterator>() <= rhs.first_iterator())
{
return lhs.first_iterator() <= rhs.first_iterator();
}
// greater than other zip_with_iterator
template<class OtherIterator, class... OtherIterators>
__AGENCY_ANNOTATION
friend auto operator>(const zip_with_iterator& lhs, const zip_with_iterator<OtherIterator,OtherIterators...>& rhs) ->
decltype(std::declval<Iterator>() > rhs.first_iterator())
{
return lhs.first_iterator() > rhs.first_iterator();
}
// greater than equal with other zip_with_iterator
template<class OtherIterator, class... OtherIterators>
__AGENCY_ANNOTATION
friend auto operator>=(const zip_with_iterator& lhs, const zip_with_iterator<OtherIterator,OtherIterators...>& rhs) ->
decltype(std::declval<Iterator>() >= rhs.first_iterator())
{
return lhs.first_iterator() >= rhs.first_iterator();
}
// minus other zip_with_iterator
template<class OtherIterator, class... OtherIterators>
__AGENCY_ANNOTATION
friend auto operator-(const zip_with_iterator& lhs, const zip_with_iterator<OtherIterator,OtherIterators...>& rhs) ->
decltype(std::declval<Iterator>() - rhs.first_iterator())
{
return lhs.first_iterator() - rhs.first_iterator();
}
private:
Function f_;
iterator_tuple_type iterator_tuple_;
};
// Factory: deduces Function and Iterators and returns the corresponding
// zip_with_iterator positioned at (iters...).
template<class Function, class... Iterators>
__AGENCY_ANNOTATION
zip_with_iterator<Function,Iterators...> make_zip_with_iterator(Function f, Iterators... iters)
{
return zip_with_iterator<Function,Iterators...>(f,iters...);
}
// because the only iterator we use when testing a zip_with_iterator for equality
// or arithmetic is the first iterator in the tuple, it's wasteful to use
// a full zip_with_iterator to represent the end of a zip_with_range
// So, introduce a zip_with_sentinel that only stores a single iterator
//
// XXX TODO: for completeness, the no iterators case
// Lightweight end-marker for a zip_with range: stores only the first
// constituent end iterator, since that is the only one zip_with_iterator
// consults for comparison and difference.
template<class Function, class Iterator, class... Iterators>
class zip_with_sentinel
{
public:
using base_iterator_type = Iterator;
// XXX in addition to their existing restrictions,
// these function templates should probably also requires that Iterators... are constructible from OtherIterators...
// and that Function is constructible from OtherFunction
template<class OtherIterator,
class = typename std::enable_if<
std::is_constructible<base_iterator_type,OtherIterator>::value
>::type>
__AGENCY_ANNOTATION
zip_with_sentinel(OtherIterator end)
: end_(end)
{}
// converting constructor from a zip_with_iterator: keeps only its first iterator
template<class OtherFunction, class OtherIterator, class... OtherIterators,
class = typename std::enable_if<
std::is_constructible<base_iterator_type,OtherIterator>::value
>::type>
__AGENCY_ANNOTATION
zip_with_sentinel(const zip_with_iterator<OtherFunction,OtherIterator,OtherIterators...>& end)
: zip_with_sentinel(end.first_iterator())
{}
// the stored end iterator
__AGENCY_ANNOTATION
const base_iterator_type& base() const
{
return end_;
}
// XXX TODO: implement all relational operators as friend functions using SFINAE to enable/disable them
// NOTE(review): in the friends below the first deduced parameter (named
// OtherIterator) actually binds to the other type's Function slot; this
// appears harmless because only base()/first_iterator() are compared, but
// the parameter names are misleading — confirm before renaming.
// equality comparison with other zip_with_sentinel
template<class OtherIterator, class... OtherIterators>
__AGENCY_ANNOTATION
friend auto operator==(const zip_with_sentinel& lhs, const zip_with_sentinel<OtherIterator,OtherIterators...>& rhs) ->
decltype(std::declval<Iterator>() == rhs.base())
{
return lhs.base() == rhs.base()
}
// equality comparison with other zip_with_iterator
template<class OtherIterator, class... OtherIterators>
__AGENCY_ANNOTATION
friend auto operator==(const zip_with_sentinel& lhs, const zip_with_iterator<OtherIterator,OtherIterators...>& rhs) ->
decltype(std::declval<Iterator>() == rhs.first_iterator())
{
return lhs.base() == rhs.first_iterator();
}
// equality comparison with other zip_with_iterator (with sentinel on the right hand side)
template<class OtherIterator, class... OtherIterators>
__AGENCY_ANNOTATION
friend auto operator==(const zip_with_iterator<OtherIterator,OtherIterators...>& lhs, const zip_with_sentinel& rhs) ->
decltype(lhs.first_iterator() == std::declval<Iterator>())
{
return lhs.first_iterator() == rhs.base();
}
// inequality comparison with other zip_with_iterator
template<class OtherIterator, class... OtherIterators>
__AGENCY_ANNOTATION
friend auto operator!=(const zip_with_sentinel& lhs, const zip_with_iterator<OtherIterator,OtherIterators...>& rhs) ->
decltype(std::declval<Iterator>() != rhs.first_iterator())
{
return lhs.base() != rhs.first_iterator();
}
// inequality comparison with other zip_with_iterator (with sentinel on the right hand side)
template<class OtherIterator, class... OtherIterators>
__AGENCY_ANNOTATION
friend auto operator!=(const zip_with_iterator<OtherIterator,OtherIterators...>& lhs, const zip_with_sentinel& rhs) ->
decltype(lhs.first_iterator() != std::declval<Iterator>())
{
return lhs.first_iterator() != rhs.base();
}
// less than with other zip_with_iterator
template<class OtherIterator, class... OtherIterators>
__AGENCY_ANNOTATION
friend auto operator<(const zip_with_sentinel& lhs, const zip_with_iterator<OtherIterator,OtherIterators...>& rhs) ->
decltype(std::declval<Iterator>() < rhs.first_iterator())
{
return lhs.base() < rhs.first_iterator();
}
// less than equal with other zip_with_iterator
template<class OtherIterator, class... OtherIterators>
__AGENCY_ANNOTATION
friend auto operator<=(const zip_with_sentinel& lhs, const zip_with_iterator<OtherIterator,OtherIterators...>& rhs) ->
decltype(std::declval<Iterator>() <= rhs.first_iterator())
{
return lhs.base() <= rhs.first_iterator();
}
// minus other zip_with_iterator
template<class OtherIterator, class... OtherIterators>
__AGENCY_ANNOTATION
friend auto operator-(const zip_with_sentinel& lhs, const zip_with_iterator<OtherIterator,OtherIterators...>& rhs) ->
decltype(std::declval<Iterator>() - rhs.first_iterator())
{
return lhs.base() - rhs.first_iterator();
}
private:
base_iterator_type end_;
};
} // end detail
// A view over several ranges zipped together: dereferencing an element applies
// Function to the corresponding elements of all ranges. Only the FIRST range's
// end() is stored (as the sentinel), so termination is governed by the first
// range alone.
template<class Function, class... Ranges>
class zip_with_view
{
public:
using iterator = detail::zip_with_iterator<
Function,
range_iterator_t<Ranges>...
>;
using sentinel = detail::zip_with_sentinel<
Function,
range_sentinel_t<Ranges>...
>;
__AGENCY_ANNOTATION
zip_with_view() = default;
__AGENCY_ANNOTATION
zip_with_view(const zip_with_view& other) = default;
// constructs the view from f and one range per Ranges...; enabled only when
// the other ranges' iterators/sentinel can construct ours
template<class OtherRange, class... OtherRanges,
__AGENCY_REQUIRES(
std::is_constructible<
iterator,
Function, range_iterator_t<OtherRange>, range_iterator_t<OtherRanges>...
>::value &&
std::is_constructible<
sentinel,
range_sentinel_t<OtherRange>
>::value
)>
__AGENCY_ANNOTATION
zip_with_view(Function f, OtherRange&& rng, OtherRanges&&... rngs)
: begin_(f, rng.begin(), rngs.begin()...),
end_(rng.end())
{}
__AGENCY_ANNOTATION
iterator begin() const
{
return begin_;
}
__AGENCY_ANNOTATION
sentinel end() const
{
return end_;
}
// random access into the zipped sequence
__AGENCY_ANNOTATION
typename iterator::reference
operator[](typename iterator::difference_type i)
{
return begin()[i];
}
// number of elements, measured against the first range's end
__AGENCY_ANNOTATION
typename iterator::difference_type
size() const
{
return end() - begin();
}
private:
iterator begin_;
sentinel end_;
};
// zip_with_views are already views, so don't wrap them
template<class Function, class... Ranges>
__AGENCY_ANNOTATION
zip_with_view<Function,Ranges...> all(zip_with_view<Function,Ranges...> v)
{
// zip_with_views are already views, so return the argument unchanged
return v;
}
// Creates a zip_with_view over ranges..., applying f elementwise on dereference.
// NOTE(review): Ranges are deduced as forwarding references, so an lvalue
// argument deduces its Ranges pack element as an lvalue-reference type, which
// is then used as the view's range type — presumably range_iterator_t /
// range_sentinel_t strip the reference; confirm against range_traits.
template<class Function, class... Ranges>
__AGENCY_ANNOTATION
zip_with_view<Function,Ranges...> zip_with(Function f, Ranges&&... ranges)
{
return zip_with_view<Function,Ranges...>(f, std::forward<Ranges>(ranges)...);
}
} // end experimental
} // end agency
<file_sep>#include <agency/agency.hpp>
#include <agency/execution/executor/detail/utility.hpp>
#include <iostream>
#include "../test_executors.hpp"
// Verifies bulk_sync_execute_with_auto_result_and_without_shared_parameters
// with a void-returning function: each of the `shape` agents increments the
// counter exactly once.
template<class Executor>
void test_returning_void(Executor exec)
{
agency::executor_shape_t<Executor> shape{100};
size_t increment_me = 0;
std::mutex mut;
agency::detail::bulk_sync_execute_with_auto_result_and_without_shared_parameters(exec, [&](size_t)
{
// RAII locking releases the mutex on every exit path
std::lock_guard<std::mutex> lock(mut);
increment_me += 1;
},
shape);
assert(increment_me == shape);
}
// Verifies bulk_sync_execute_with_auto_result_and_without_shared_parameters
// with a result-returning function: each agent returns 13, so the collected
// result equals a container of 13s.
template<class Executor>
void test_returning_results(Executor exec)
{
using index_type = agency::executor_index_t<Executor>;
size_t shape = 10;
auto result = agency::detail::bulk_sync_execute_with_auto_result_and_without_shared_parameters(exec, [](index_type)
{
return 13;
},
shape);
using container_type = agency::vector<int, agency::executor_allocator_t<Executor,int>>;
assert(container_type(shape, 13) == result);
}
// Exercises bulk_sync_execute_with_auto_result_and_without_shared_parameters
// against every test executor, for both void-returning and result-returning
// user functions.
int main()
{
test_returning_void(bulk_synchronous_executor());
test_returning_void(bulk_asynchronous_executor());
test_returning_void(bulk_continuation_executor());
test_returning_void(not_a_bulk_synchronous_executor());
test_returning_void(not_a_bulk_asynchronous_executor());
test_returning_void(not_a_bulk_continuation_executor());
test_returning_void(complete_bulk_executor());
test_returning_results(bulk_synchronous_executor());
test_returning_results(bulk_asynchronous_executor());
test_returning_results(bulk_continuation_executor());
test_returning_results(not_a_bulk_synchronous_executor());
test_returning_results(not_a_bulk_asynchronous_executor());
test_returning_results(not_a_bulk_continuation_executor());
test_returning_results(complete_bulk_executor());
std::cout << "OK" << std::endl;
return 0;
}
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/memory/allocator.hpp>
#include <agency/memory/allocator/detail/allocator_traits.hpp>
#include <agency/detail/shape_cast.hpp>
namespace agency
{
namespace detail
{
// storage takes an optional Shape parameter instead of assuming size_t
// so that multidimensional containers need not store their shape in
// addition to what is maintained by storage
// Move-only owner of an allocator-managed buffer of T, sized by a Shape
// (which need not be size_t; its linearized size is obtained via shape_cast).
// Memory is allocated uninitialized; element construction/destruction is the
// containing class's responsibility.
template<class T, class Allocator, class Shape = std::size_t>
class storage
{
public:
using value_type = T;
using shape_type = Shape;
// Allocates room for shape_cast<size_t>(shape) elements; throws bad_alloc
// (via detail::throw_bad_alloc) if the allocator returns null.
__agency_exec_check_disable__
__AGENCY_ANNOTATION
storage(shape_type shape, const Allocator& allocator = Allocator{})
: data_(nullptr),
shape_(shape),
allocator_(allocator)
{
if(size() > 0)
{
data_ = detail::allocator_traits<Allocator>::allocate(allocator_, size())
if(data_ == nullptr)
{
detail::throw_bad_alloc();
}
}
}
// same as above, but takes ownership of a temporary allocator
__agency_exec_check_disable__
__AGENCY_ANNOTATION
storage(shape_type shape, Allocator&& allocator)
: data_(nullptr),
shape_(shape),
allocator_(std::move(allocator))
{
if(size() > 0)
{
data_ = detail::allocator_traits<Allocator>::allocate(allocator_, size());
if(data_ == nullptr)
{
detail::throw_bad_alloc();
}
}
}
__agency_exec_check_disable__
__AGENCY_ANNOTATION
storage(storage&& other)
: data_(other.data_),
shape_(other.shape_),
allocator_(std::move(other.allocator_))
{
// leave the other storage in a valid state
other.data_ = nullptr;
other.shape_ = shape_type{};
}
// empty storage carrying the given allocator
__AGENCY_ANNOTATION
storage(const Allocator& allocator)
: storage(shape_type{}, allocator)
{}
__agency_exec_check_disable__
__AGENCY_ANNOTATION
storage()
: storage(Allocator())
{}
__agency_exec_check_disable__
__AGENCY_ANNOTATION
~storage()
{
reset();
}
private:
// propagate_on_container_move_assignment == true: adopt the other allocator
__agency_exec_check_disable__
__AGENCY_ANNOTATION
void move_assign_allocator(std::true_type, Allocator& other_allocator)
{
// propagate the allocator
allocator_ = std::move(other_allocator);
}
// propagate_on_container_move_assignment == false: keep our allocator
__AGENCY_ANNOTATION
void move_assign_allocator(std::false_type, Allocator&)
{
// do nothing
}
// deallocates the buffer (if any) and returns to the empty state
__AGENCY_ANNOTATION
void reset()
{
if(data() != nullptr)
{
detail::allocator_traits<Allocator>::deallocate(allocator(), data(), size());
data_ = nullptr;
shape_ = shape_type{};
}
}
public:
__AGENCY_ANNOTATION
storage& operator=(storage&& other)
{
// we have to call reset() instead of simply swapping ourself with other
// because depending on propagate_on_container_move_assignment, we may need
// to retain our allocator
reset();
detail::adl_swap(data_, other.data_);
detail::adl_swap(shape_, other.shape_);
move_assign_allocator(typename std::allocator_traits<Allocator>::propagate_on_container_move_assignment(), other.allocator());
return *this;
}
__AGENCY_ANNOTATION
T* data()
{
return data_;
}
__AGENCY_ANNOTATION
const T* data() const
{
return data_;
}
__AGENCY_ANNOTATION
shape_type shape() const
{
return shape_;
}
// linearized element count derived from the shape
__AGENCY_ANNOTATION
std::size_t size() const
{
return agency::detail::shape_cast<std::size_t>(shape());
}
__AGENCY_ANNOTATION
const Allocator& allocator() const
{
return allocator_;
}
__AGENCY_ANNOTATION
Allocator& allocator()
{
return allocator_;
}
// NOTE(review): unconditionally swaps allocators, without consulting
// propagate_on_container_swap — confirm this is intended for the
// allocators used with this class
__AGENCY_ANNOTATION
void swap(storage& other)
{
detail::adl_swap(data_, other.data_);
detail::adl_swap(shape_, other.shape_);
detail::adl_swap(allocator_, other.allocator_);
}
private:
T* data_;
shape_type shape_;
Allocator allocator_;
};
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/future.hpp>
#include <agency/execution/executor/customization_points/bulk_then_execute.hpp>
#include <agency/execution/executor/executor_traits.hpp>
#include <agency/detail/type_traits.hpp>
namespace agency
{
// This overload is selected when the executor natively supports bulk asynchronous
// execution: it simply forwards all arguments to the executor's own member function.
// Requires one shared-parameter factory per level of the executor's execution hierarchy.
__agency_exec_check_disable__
template<class E, class Function, class ResultFactory, class... Factories,
__AGENCY_REQUIRES(detail::BulkAsynchronousExecutor<E>()),
__AGENCY_REQUIRES(executor_execution_depth<E>::value == sizeof...(Factories))
>
__AGENCY_ANNOTATION
executor_future_t<
E,
detail::result_of_t<ResultFactory()>
>
bulk_async_execute(E& exec, Function f, executor_shape_t<E> shape, ResultFactory result_factory, Factories... shared_factories)
{
return exec.bulk_async_execute(f, shape, result_factory, shared_factories...);
}
// Fallback overload for bulk executors without a native bulk_async_execute:
// adapts bulk_then_execute by chaining the work after an immediately-ready void future.
__agency_exec_check_disable__
template<class E, class Function, class ResultFactory, class... Factories,
__AGENCY_REQUIRES(detail::BulkExecutor<E>() && !detail::BulkAsynchronousExecutor<E>()),
__AGENCY_REQUIRES(executor_execution_depth<E>::value == sizeof...(Factories))
>
__AGENCY_ANNOTATION
executor_future_t<
E,
detail::result_of_t<ResultFactory()>
>
bulk_async_execute(E& exec, Function f, executor_shape_t<E> shape, ResultFactory result_factory, Factories... shared_factories)
{
using void_future_type = executor_future_t<E,void>;
// XXX we might want to actually allow the executor to participate here
// create an already-satisfied predecessor so the continuation can launch immediately
auto predecessor = future_traits<void_future_type>::make_ready();
return agency::bulk_then_execute(exec, f, shape, predecessor, result_factory, shared_factories...);
}
} // end agency
<file_sep>#pragma once
#include <agency/execution/execution_categories.hpp>
#include <agency/tuple.hpp>
#include <utility>
namespace agency
{
namespace detail
{
// make_tuple_if_not_scoped(x) returns x unchanged when the execution category is
// scoped (nested), and wraps x in a single-element tuple otherwise, so callers can
// treat both cases uniformly as tuple-like values.
// execution is scoped, just return x
template<class ExecutionCategory1, class ExecutionCategory2, class T>
__AGENCY_ANNOTATION
T make_tuple_if_not_scoped(agency::scoped_execution_tag<ExecutionCategory1,ExecutionCategory2>, const T& x)
{
return x;
}
// execution is not scoped, wrap up x in a tuple
template<class ExecutionCategory, class T>
__AGENCY_ANNOTATION
agency::tuple<T> make_tuple_if_not_scoped(ExecutionCategory, const T& x)
{
return agency::make_tuple(x);
}
// convenience dispatcher: default-constructs the category tag and forwards to the
// appropriate overload above
template<class ExecutionCategory, class T>
__AGENCY_ANNOTATION
auto make_tuple_if_not_scoped(const T& x)
-> decltype(agency::detail::make_tuple_if_not_scoped(ExecutionCategory(), x))
{
return agency::detail::make_tuple_if_not_scoped(ExecutionCategory(), x);
}
// tie_if_not_scoped(x) is the reference analogue of make_tuple_if_not_scoped:
// it forwards x through unchanged for scoped execution, and ties x into a
// single-element tuple of references otherwise.
// execution is scoped, forward x through unchanged
template<class ExecutionCategory1, class ExecutionCategory2, class T>
__AGENCY_ANNOTATION
auto tie_if_not_scoped(scoped_execution_tag<ExecutionCategory1,ExecutionCategory2>, T&& x)
-> decltype(std::forward<T>(x))
{
return std::forward<T>(x);
}
// execution is not scoped, tie x into a tuple of references
template<class ExecutionCategory, class T>
__AGENCY_ANNOTATION
auto tie_if_not_scoped(ExecutionCategory, T&& x)
-> decltype(agency::tie(std::forward<T>(x)))
{
return agency::tie(std::forward<T>(x));
}
// convenience dispatcher: default-constructs the category tag and forwards to the
// appropriate overload above
template<class ExecutionCategory, class T>
__AGENCY_ANNOTATION
auto tie_if_not_scoped(T&& x)
-> decltype(tie_if_not_scoped(ExecutionCategory(), std::forward<T>(x)))
{
return tie_if_not_scoped(ExecutionCategory(), std::forward<T>(x));
}
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <type_traits>
#include <cstdint>
#include <cstddef>
#include <limits>
namespace agency
{
namespace experimental
{
// a bounded_integer represents an integer known at compile time to be no greater than a given bound
// if arithmetic operations would cause overflow beyond the bound, the value of the resulting bounded_integer is undefined
template<class Integer, Integer bound>
class bounded_integer
{
public:
// XXX in principle, this class template could probably work for all arithmetic types
static_assert(std::is_integral<Integer>::value, "Integer must be an integer type.");
using value_type = Integer;
// the compile-time upper bound, exposed for generic code
static const value_type static_bound = bound;
// default constructor leaves value_ uninitialized (no member initializer is provided)
__AGENCY_ANNOTATION
constexpr bounded_integer() = default;
__AGENCY_ANNOTATION
constexpr bounded_integer(const bounded_integer&) = default;
// converting constructor from any type constructible into value_type
// if number > bound, the value of the bounded_integer is undefined
template<class Number,
class = typename std::enable_if<
std::is_constructible<value_type, Number>::value
>::type>
__AGENCY_ANNOTATION
constexpr bounded_integer(const Number& number)
: value_(number)
{}
// access the stored value
__AGENCY_ANNOTATION
constexpr const value_type& value() const
{
return value_;
}
// allow conversions to the value_type
__AGENCY_ANNOTATION
constexpr operator const value_type& () const
{
return value();
}
// operator members follow
// NOTE: the compound assignment operators below apply the operation directly and do
// not clamp the result; exceeding the bound leaves the value undefined per the class
// contract.
__AGENCY_ANNOTATION
bounded_integer& operator=(const bounded_integer&) = default;
// assign
__AGENCY_ANNOTATION
bounded_integer& operator=(const value_type& other)
{
value_ = other;
return *this;
}
// pre increment
__AGENCY_ANNOTATION
bounded_integer& operator++()
{
++value_;
return *this;
}
// post-increment
__AGENCY_ANNOTATION
bounded_integer operator++(int)
{
bounded_integer result = *this;
value_++;
return result;
}
// plus-assign
__AGENCY_ANNOTATION
bounded_integer& operator+=(const value_type& rhs)
{
value_ += rhs;
return *this;
}
// minus-assign
__AGENCY_ANNOTATION
bounded_integer& operator-=(const value_type& rhs)
{
value_ -= rhs;
return *this;
}
// multiply-assign
__AGENCY_ANNOTATION
bounded_integer& operator*=(const value_type& rhs)
{
value_ *= rhs;
return *this;
}
// divide-assign
__AGENCY_ANNOTATION
bounded_integer& operator/=(const value_type& rhs)
{
value_ /= rhs;
return *this;
}
// modulus-assign
__AGENCY_ANNOTATION
bounded_integer& operator%=(const value_type& rhs)
{
value_ %= rhs;
return *this;
}
// left shift-assign
__AGENCY_ANNOTATION
bounded_integer& operator<<=(const value_type& rhs)
{
value_ <<= rhs;
return *this;
}
// right shift-assign
__AGENCY_ANNOTATION
bounded_integer& operator>>=(const value_type& rhs)
{
value_ >>= rhs;
return *this;
}
// unary negate
__AGENCY_ANNOTATION
constexpr bool operator!() const
{
return !value();
}
// conversion to bool
__AGENCY_ANNOTATION
explicit constexpr operator bool() const
{
return value();
}
private:
// XXX in principle, since we know the largest value this type can store,
// we could potentially implement the storage for the value with a
// narrower type
using storage_type = value_type;
storage_type value_;
};
// operators follow
// * the reason these are defined with trailing return type is two-fold:
// 1. to allow SFINAE to remove them from the overload set for operations on types which do not make sense
// 2. to deduce the result of arithmetic operations on mixed types
// * the reason there are three overloads for each operator is
// 1. to allow the bounded_integer to appear on either side of an expression
// 2. to disambiguate operations in which a bounded_integer appears on both sides of an expression
// plus
// NOTE: in the homogeneous (bounded_integer op bounded_integer) overloads below, the
// trailing return type is declared as decltype(lhs.value() op rhs.value()) so that the
// declared type is computed from the very expression the body returns, rather than being
// routed through the mixed-operand overload (whose declaration order it would otherwise
// depend on). The resulting type is unchanged.
template<class Integer, Integer bound, class Number>
__AGENCY_ANNOTATION
constexpr auto operator+(const bounded_integer<Integer,bound>& lhs, const Number& rhs) ->
  decltype(lhs.value() + rhs)
{
  return lhs.value() + rhs;
}

template<class Number, class Integer, Integer bound>
__AGENCY_ANNOTATION
constexpr auto operator+(const Number& lhs, const bounded_integer<Integer,bound>& rhs) ->
  decltype(lhs + rhs.value())
{
  return lhs + rhs.value();
}

template<class Integer, Integer bound>
__AGENCY_ANNOTATION
constexpr auto operator+(const bounded_integer<Integer,bound>& lhs, const bounded_integer<Integer,bound>& rhs) ->
  decltype(lhs.value() + rhs.value())
{
  return lhs.value() + rhs.value();
}

// minus
template<class Integer, Integer bound, class Number>
__AGENCY_ANNOTATION
constexpr auto operator-(const bounded_integer<Integer,bound>& lhs, const Number& rhs) ->
  decltype(lhs.value() - rhs)
{
  return lhs.value() - rhs;
}

template<class Number, class Integer, Integer bound>
__AGENCY_ANNOTATION
constexpr auto operator-(const Number& lhs, const bounded_integer<Integer,bound>& rhs) ->
  decltype(lhs - rhs.value())
{
  return lhs - rhs.value();
}

template<class Integer, Integer bound>
__AGENCY_ANNOTATION
constexpr auto operator-(const bounded_integer<Integer,bound>& lhs, const bounded_integer<Integer,bound>& rhs) ->
  decltype(lhs.value() - rhs.value())
{
  return lhs.value() - rhs.value();
}

// multiply
template<class Integer, Integer bound, class Number>
__AGENCY_ANNOTATION
constexpr auto operator*(const bounded_integer<Integer,bound>& lhs, const Number& rhs) ->
  decltype(lhs.value() * rhs)
{
  return lhs.value() * rhs;
}

template<class Number, class Integer, Integer bound>
__AGENCY_ANNOTATION
constexpr auto operator*(const Number& lhs, const bounded_integer<Integer,bound>& rhs) ->
  decltype(lhs * rhs.value())
{
  return lhs * rhs.value();
}

template<class Integer, Integer bound>
__AGENCY_ANNOTATION
constexpr auto operator*(const bounded_integer<Integer,bound>& lhs, const bounded_integer<Integer,bound>& rhs) ->
  decltype(lhs.value() * rhs.value())
{
  return lhs.value() * rhs.value();
}

// divide
template<class Integer, Integer bound, class Number>
__AGENCY_ANNOTATION
constexpr auto operator/(const bounded_integer<Integer,bound>& lhs, const Number& rhs) ->
  decltype(lhs.value() / rhs)
{
  return lhs.value() / rhs;
}

template<class Number, class Integer, Integer bound>
__AGENCY_ANNOTATION
constexpr auto operator/(const Number& lhs, const bounded_integer<Integer,bound>& rhs) ->
  decltype(lhs / rhs.value())
{
  return lhs / rhs.value();
}

template<class Integer, Integer bound>
__AGENCY_ANNOTATION
constexpr auto operator/(const bounded_integer<Integer,bound>& lhs, const bounded_integer<Integer,bound>& rhs) ->
  decltype(lhs.value() / rhs.value())
{
  return lhs.value() / rhs.value();
}

// modulus
template<class Integer, Integer bound, class Number>
__AGENCY_ANNOTATION
constexpr auto operator%(const bounded_integer<Integer,bound>& lhs, const Number& rhs) ->
  decltype(lhs.value() % rhs)
{
  return lhs.value() % rhs;
}

template<class Number, class Integer, Integer bound>
__AGENCY_ANNOTATION
constexpr auto operator%(const Number& lhs, const bounded_integer<Integer,bound>& rhs) ->
  decltype(lhs % rhs.value())
{
  return lhs % rhs.value();
}

template<class Integer, Integer bound>
__AGENCY_ANNOTATION
constexpr auto operator%(const bounded_integer<Integer,bound>& lhs, const bounded_integer<Integer,bound>& rhs) ->
  decltype(lhs.value() % rhs.value())
{
  return lhs.value() % rhs.value();
}

// left shift
template<class Integer, Integer bound, class Number>
__AGENCY_ANNOTATION
constexpr auto operator<<(const bounded_integer<Integer,bound>& lhs, const Number& rhs) ->
  decltype(lhs.value() << rhs)
{
  return lhs.value() << rhs;
}

template<class Number, class Integer, Integer bound>
__AGENCY_ANNOTATION
constexpr auto operator<<(const Number& lhs, const bounded_integer<Integer,bound>& rhs) ->
  decltype(lhs << rhs.value())
{
  return lhs << rhs.value();
}

template<class Integer, Integer bound>
__AGENCY_ANNOTATION
constexpr auto operator<<(const bounded_integer<Integer,bound>& lhs, const bounded_integer<Integer,bound>& rhs) ->
  decltype(lhs.value() << rhs.value())
{
  return lhs.value() << rhs.value();
}

// right shift
template<class Integer, Integer bound, class Number>
__AGENCY_ANNOTATION
constexpr auto operator>>(const bounded_integer<Integer,bound>& lhs, const Number& rhs) ->
  decltype(lhs.value() >> rhs)
{
  return lhs.value() >> rhs;
}

template<class Number, class Integer, Integer bound>
__AGENCY_ANNOTATION
constexpr auto operator>>(const Number& lhs, const bounded_integer<Integer,bound>& rhs) ->
  decltype(lhs >> rhs.value())
{
  return lhs >> rhs.value();
}

template<class Integer, Integer bound>
__AGENCY_ANNOTATION
constexpr auto operator>>(const bounded_integer<Integer,bound>& lhs, const bounded_integer<Integer,bound>& rhs) ->
  decltype(lhs.value() >> rhs.value())
{
  return lhs.value() >> rhs.value();
}

// equal
template<class Integer, Integer bound, class Number>
__AGENCY_ANNOTATION
constexpr auto operator==(const bounded_integer<Integer,bound>& lhs, const Number& rhs) ->
  decltype(lhs.value() == rhs)
{
  return lhs.value() == rhs;
}

template<class Number, class Integer, Integer bound>
__AGENCY_ANNOTATION
constexpr auto operator==(const Number& lhs, const bounded_integer<Integer,bound>& rhs) ->
  decltype(lhs == rhs.value())
{
  return lhs == rhs.value();
}

template<class Integer, Integer bound>
__AGENCY_ANNOTATION
constexpr auto operator==(const bounded_integer<Integer,bound>& lhs, const bounded_integer<Integer,bound>& rhs) ->
  decltype(lhs.value() == rhs.value())
{
  return lhs.value() == rhs.value();
}

// not equal
template<class Integer, Integer bound, class Number>
__AGENCY_ANNOTATION
constexpr auto operator!=(const bounded_integer<Integer,bound>& lhs, const Number& rhs) ->
  decltype(lhs.value() != rhs)
{
  return lhs.value() != rhs;
}

template<class Number, class Integer, Integer bound>
__AGENCY_ANNOTATION
constexpr auto operator!=(const Number& lhs, const bounded_integer<Integer,bound>& rhs) ->
  decltype(lhs != rhs.value())
{
  return lhs != rhs.value();
}

template<class Integer, Integer bound>
__AGENCY_ANNOTATION
constexpr auto operator!=(const bounded_integer<Integer,bound>& lhs, const bounded_integer<Integer,bound>& rhs) ->
  decltype(lhs.value() != rhs.value())
{
  return lhs.value() != rhs.value();
}

// less
template<class Integer, Integer bound, class Number>
__AGENCY_ANNOTATION
constexpr auto operator<(const bounded_integer<Integer,bound>& lhs, const Number& rhs) ->
  decltype(lhs.value() < rhs)
{
  return lhs.value() < rhs;
}

template<class Number, class Integer, Integer bound>
__AGENCY_ANNOTATION
constexpr auto operator<(const Number& lhs, const bounded_integer<Integer,bound>& rhs) ->
  decltype(lhs < rhs.value())
{
  return lhs < rhs.value();
}

template<class Integer, Integer bound>
__AGENCY_ANNOTATION
constexpr auto operator<(const bounded_integer<Integer,bound>& lhs, const bounded_integer<Integer,bound>& rhs) ->
  decltype(lhs.value() < rhs.value())
{
  return lhs.value() < rhs.value();
}

// greater
template<class Integer, Integer bound, class Number>
__AGENCY_ANNOTATION
constexpr auto operator>(const bounded_integer<Integer,bound>& lhs, const Number& rhs) ->
  decltype(lhs.value() > rhs)
{
  return lhs.value() > rhs;
}

template<class Number, class Integer, Integer bound>
__AGENCY_ANNOTATION
constexpr auto operator>(const Number& lhs, const bounded_integer<Integer,bound>& rhs) ->
  decltype(lhs > rhs.value())
{
  return lhs > rhs.value();
}

template<class Integer, Integer bound>
__AGENCY_ANNOTATION
constexpr auto operator>(const bounded_integer<Integer,bound>& lhs, const bounded_integer<Integer,bound>& rhs) ->
  decltype(lhs.value() > rhs.value())
{
  return lhs.value() > rhs.value();
}

// less equal
template<class Integer, Integer bound, class Number>
__AGENCY_ANNOTATION
constexpr auto operator<=(const bounded_integer<Integer,bound>& lhs, const Number& rhs) ->
  decltype(lhs.value() <= rhs)
{
  return lhs.value() <= rhs;
}

template<class Number, class Integer, Integer bound>
__AGENCY_ANNOTATION
constexpr auto operator<=(const Number& lhs, const bounded_integer<Integer,bound>& rhs) ->
  decltype(lhs <= rhs.value())
{
  return lhs <= rhs.value();
}

template<class Integer, Integer bound>
__AGENCY_ANNOTATION
constexpr auto operator<=(const bounded_integer<Integer,bound>& lhs, const bounded_integer<Integer,bound>& rhs) ->
  decltype(lhs.value() <= rhs.value())
{
  return lhs.value() <= rhs.value();
}

// greater equal
template<class Integer, Integer bound, class Number>
__AGENCY_ANNOTATION
constexpr auto operator>=(const bounded_integer<Integer,bound>& lhs, const Number& rhs) ->
  decltype(lhs.value() >= rhs)
{
  return lhs.value() >= rhs;
}

template<class Number, class Integer, Integer bound>
__AGENCY_ANNOTATION
constexpr auto operator>=(const Number& lhs, const bounded_integer<Integer,bound>& rhs) ->
  decltype(lhs >= rhs.value())
{
  return lhs >= rhs.value();
}

template<class Integer, Integer bound>
__AGENCY_ANNOTATION
constexpr auto operator>=(const bounded_integer<Integer,bound>& lhs, const bounded_integer<Integer,bound>& rhs) ->
  decltype(lhs.value() >= rhs.value())
{
  return lhs.value() >= rhs.value();
}
// define some aliases for common integer types
// (in each alias, `bound` is the inclusive compile-time upper bound of the value)
template<int bound>
using bounded_int = bounded_integer<int, bound>;
template<unsigned int bound>
using bounded_uint = bounded_integer<unsigned int, bound>;
template<short bound>
using bounded_short = bounded_integer<short, bound>;
template<unsigned short bound>
using bounded_ushort = bounded_integer<unsigned short, bound>;
template<std::int8_t bound>
using bounded_int8_t = bounded_integer<std::int8_t, bound>;
template<std::int16_t bound>
using bounded_int16_t = bounded_integer<std::int16_t, bound>;
template<std::int32_t bound>
using bounded_int32_t = bounded_integer<std::int32_t, bound>;
template<std::int64_t bound>
using bounded_int64_t = bounded_integer<std::int64_t, bound>;
template<std::uint8_t bound>
using bounded_uint8_t = bounded_integer<std::uint8_t, bound>;
template<std::uint16_t bound>
using bounded_uint16_t = bounded_integer<std::uint16_t, bound>;
template<std::uint32_t bound>
using bounded_uint32_t = bounded_integer<std::uint32_t, bound>;
template<std::uint64_t bound>
using bounded_uint64_t = bounded_integer<std::uint64_t, bound>;
template<std::size_t bound>
using bounded_size_t = bounded_integer<std::size_t, bound>;
} // end experimental
} // end agency
// specialize std::numeric_limits for bounded_integer
namespace std
{
template<class Integer, Integer bound>
class numeric_limits<agency::experimental::bounded_integer<Integer,bound>>
: public numeric_limits<Integer> // inherit the functionality of numeric_limits<Integer>
{
public:
// we can provide a specialization of max() since we know the bound
// NOTE(review): this returns Integer rather than the bounded_integer type itself
// (the std convention is that numeric_limits<T>::max() returns T); presumably relied
// upon because bounded_integer converts implicitly from Integer -- confirm intended.
__AGENCY_ANNOTATION
static constexpr Integer max()
{
return bound;
}
};
}
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/type_traits.hpp>
#include <agency/execution/executor/executor_traits/detail/member_future_or.hpp>
#include <agency/future.hpp>
#include <future>
#include <type_traits>
#include <utility>
namespace agency
{
namespace detail
{
// Detection trait: has_then_execute<Executor,Function,Future>::type is true_type
// exactly when Executor has a member .then_execute(Function, Future&) whose return
// type equals the executor's associated future (or std::future by default) of the
// continuation's result.
template<class Executor, class Function, class Future>
struct has_then_execute_impl
{
using result_type = result_of_continuation_t<Function,Future>;
using expected_future_type = member_future_or_t<Executor,result_type,std::future>;
// well-formed (and enabled) only when .then_execute exists and returns exactly
// expected_future_type; preferred by overload resolution for the int argument below
template<class Executor1,
class ReturnType = decltype(
std::declval<Executor1>().then_execute(
std::declval<Function>(),
std::declval<Future&>()
)
),
class = typename std::enable_if<
std::is_same<ReturnType,expected_future_type>::value
>::type>
static std::true_type test(int);
// fallback selected when the overload above is SFINAEd away
template<class>
static std::false_type test(...);
using type = decltype(test<Executor>(0));
};
template<class Executor, class Function, class Future>
using has_then_execute = typename has_then_execute_impl<Executor, Function, Future>::type;
// is_continuation_executor<T> is true_type when T can .then_execute() a simple
// int-producing continuation of an int-valued predecessor future.
template<class T>
struct is_continuation_executor_impl
{
// types related to functions passed to .then_execute()
using result_type = int;
using predecessor_type = int;
using predecessor_future_type = member_future_or_t<T,predecessor_type,std::future>;
// the functions we'll pass to .then_execute() to test
// XXX WAR nvcc 8.0 bug
//using test_function = std::function<result_type(predecessor_type&)>;
// minimal callable standing in for std::function (declaration only -- used
// unevaluated inside the detection machinery)
struct test_function
{
result_type operator()(predecessor_type&);
};
using type = has_then_execute<
T,
test_function,
predecessor_future_type
>;
};
} // end detail
template<class T>
using is_continuation_executor = typename detail::is_continuation_executor_impl<T>::type;
namespace detail
{
// a fake Concept to use with __AGENCY_REQUIRES
// (converts the is_continuation_executor trait to a constexpr bool via the
// integral_constant call operator)
template<class T>
constexpr bool ContinuationExecutor()
{
return is_continuation_executor<T>();
}
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/execution/executor/executor_traits/executor_allocator.hpp>
#include <agency/execution/executor/executor_traits/executor_execution_category.hpp>
#include <agency/execution/executor/executor_traits/executor_execution_depth.hpp>
#include <agency/execution/executor/executor_traits/executor_future.hpp>
#include <agency/execution/executor/executor_traits/executor_index.hpp>
#include <agency/execution/executor/executor_traits/executor_shape.hpp>
#include <agency/execution/executor/executor_traits/is_asynchronous_executor.hpp>
#include <agency/execution/executor/executor_traits/is_bulk_asynchronous_executor.hpp>
#include <agency/execution/executor/executor_traits/is_bulk_continuation_executor.hpp>
#include <agency/execution/executor/executor_traits/is_bulk_executor.hpp>
#include <agency/execution/executor/executor_traits/is_bulk_synchronous_executor.hpp>
#include <agency/execution/executor/executor_traits/is_continuation_executor.hpp>
#include <agency/execution/executor/executor_traits/is_executor.hpp>
#include <agency/execution/executor/executor_traits/is_simple_executor.hpp>
#include <agency/execution/executor/executor_traits/is_synchronous_executor.hpp>
<file_sep>#include <iostream>
#include <cassert>
#include <agency/execution/executor/customization_points/make_ready_future.hpp>
#include <agency/execution/executor/customization_points/future_cast.hpp>
#include "../test_executors.hpp"
// Instrumented executor: behaves like continuation_executor but provides its own
// future_cast member and records whether it was called. The destructor asserts the
// member was actually used, so the test fails if agency::future_cast bypasses it.
struct continuation_executor_with_future_cast : continuation_executor
{
continuation_executor_with_future_cast()
: function_called_(false)
{}
~continuation_executor_with_future_cast()
{
assert(function_called_);
}
// delegates to future_traits' cast, flagging that this member was reached
template<class T, class Future>
std::future<T> future_cast(Future& fut)
{
function_called_ = true;
return agency::future_traits<Future>::template cast<T>(fut);
}
bool function_called_;  // set when future_cast is invoked
};
// Same instrumentation as continuation_executor_with_future_cast, but layered on
// bulk_continuation_executor: the destructor asserts that the custom future_cast
// member was reached at least once.
struct bulk_continuation_executor_with_future_cast : bulk_continuation_executor
{
bulk_continuation_executor_with_future_cast()
: function_called_(false)
{}
~bulk_continuation_executor_with_future_cast()
{
assert(function_called_);
}
// delegates to future_traits' cast, flagging that this member was reached
template<class T, class Future>
std::future<T> future_cast(Future& fut)
{
function_called_ = true;
return agency::future_traits<Future>::template cast<T>(fut);
}
bool function_called_;  // set when future_cast is invoked
};
// Exercises agency::future_cast through the given executor: first discarding the
// value entirely (int -> void), then converting it to another type (int -> unsigned int).
// In both cases the source future must be consumed by the cast.
template<class Executor>
void test(Executor&& exec)
{
  // casting away the value yields a waitable void future
  {
    auto source = agency::make_ready_future<int>(exec, 13);
    auto casted = agency::future_cast<void>(exec, source);

    assert(!source.valid());
    assert(casted.valid());

    casted.wait();
  }

  // converting the value type preserves the value across the cast
  {
    auto source = agency::make_ready_future<int>(exec, 13);
    auto casted = agency::future_cast<unsigned int>(exec, source);

    assert(!source.valid());
    assert(casted.valid());
    assert(casted.get() == 13);
  }
}
int main()
{
// run the future_cast test over every flavor of test executor
test(bulk_synchronous_executor());
test(bulk_asynchronous_executor());
test(bulk_continuation_executor());
test(not_a_bulk_synchronous_executor());
test(not_a_bulk_asynchronous_executor());
test(not_a_bulk_continuation_executor());
test(complete_bulk_executor());
// these two additionally assert (in their destructors) that their own
// future_cast member was invoked by agency::future_cast
test(continuation_executor_with_future_cast());
test(bulk_continuation_executor_with_future_cast());
std::cout << "OK" << std::endl;
return 0;
}
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/experimental/variant.hpp>
#include <agency/future/variant_future.hpp>
#include <agency/future/detail/future_sum.hpp>
#include <agency/coordinate/detail/shape/common_shape.hpp>
#include <agency/memory/allocator/detail/allocator_sum.hpp>
#include <agency/execution/execution_categories.hpp>
#include <agency/execution/executor/executor_traits.hpp>
#include <agency/execution/executor/customization_points.hpp>
#include <agency/detail/integer_sequence.hpp>
#include <agency/tuple.hpp>
#include <type_traits>
#include <utility>
#include <typeinfo>
namespace agency
{
template<class Executor, class... Executors>
class variant_executor
{
private:
using variant_type = experimental::variant<Executor,Executors...>;
public:
/// variant_executor's execution category provides the strongest guarantee
/// permitted by the union of the categories of the alternative executors
/// When no static guarantee may be provided, the category is dynamic_execution_tag.
using execution_category = detail::common_execution_category_t<
executor_execution_category_t<Executor>,
executor_execution_category_t<Executors>...
>;
// XXX consider providing a way to query the runtime value of the category through
// a function member
// it could return dynamic_execution_tag, and this category could have a runtime value
private:
static constexpr size_t execution_depth = detail::execution_depth<execution_category>::value;
public:
/// variant_executor's associated future type is a sum type for futures.
/// This type is a variant_future whose alternatives are taken from the list of the alternative executors' futures.
/// Only the unique futures in this list are alternatives of this variant_future.
/// If there is only a single unique future type, then variant_executor's associated future is simply that unique future.
template<class T>
using future = detail::future_sum_t<
executor_future_t<Executor,T>,
executor_future_t<Executors,T>...
>;
using shape_type = detail::common_shape_t<
executor_shape_t<Executor>,
executor_shape_t<Executors>...
>;
/// variant_executor's associated allocator is a sum type for allocators.
/// This type is a variant_allocator whose alternatives are taken from the list of the alternative executors' allocators.
/// Only the unique allocators in this list are alternatives of this variant_allocator.
/// If there is only a single unique allocator type, then variant_allocator's associated allocator is simply that unique allocator.
template<class T>
using allocator = detail::allocator_sum_t<
executor_allocator_t<Executor,T>,
executor_allocator_t<Executors,T>...
>;
__AGENCY_ANNOTATION
variant_executor() = default;
__AGENCY_ANNOTATION
variant_executor(const variant_executor& other) = default;
template<class OtherExecutor,
__AGENCY_REQUIRES(
std::is_constructible<variant_type, OtherExecutor&&>::value
)>
__AGENCY_ANNOTATION
variant_executor(OtherExecutor&& other)
: variant_(std::forward<OtherExecutor>(other))
{}
private:
struct type_visitor
{
template<class E>
const std::type_info& operator()(const E& exec) const
{
return typeid(exec);
}
};
public:
const std::type_info& type() const
{
return experimental::visit(type_visitor(), variant_);
}
__AGENCY_ANNOTATION
std::size_t index() const
{
return variant_.index();
}
// customization points follow in alphabetical order
//
// the implementation of each follows the same pattern:
// 1. define one (possibly two) visitor types that visit an alternative executor
// and call the corresponding customization point
// 2. the member function creates a visitor of the appropriate type and calls experimental::visit
//
// There's nothing particularly interesting happening -- the goal of each member function is simply
// to forward its parameters to the active alternative via visitation.
//
// Two notes:
//
// 1. Our shape_type must be converted to the alternative's shape_type inside the visitor via shape_cast()
//
// 2. Functions that take a future as a parameter have specializations for foreign Futures
// and variant_futures. When a variant_future is encountered, the visitor visits both the variant_executor and variant_future
// simultaneously.
// async_execute
private:
template<class FunctionRef>
struct async_execute_visitor
{
FunctionRef f;
__agency_exec_check_disable__
template<class E>
__AGENCY_ANNOTATION
future<detail::result_of_t<detail::decay_t<FunctionRef>()>>
operator()(E& exec) const
{
return agency::async_execute(exec, std::forward<FunctionRef>(f));
}
};
public:
template<class Function>
__AGENCY_ANNOTATION
future<detail::result_of_t<detail::decay_t<Function>()>>
async_execute(Function&& f)
{
return experimental::visit(async_execute_visitor<Function&&>{std::forward<Function>(f)}, variant_);
}
// bulk_async_execute
private:
template<class Function, class ResultFactory, class... SharedFactories>
struct bulk_async_execute_visitor
{
Function f;
shape_type shape;
ResultFactory result_factory;
tuple<SharedFactories...> shared_factories;
__agency_exec_check_disable__
template<class E, size_t... Indices>
__AGENCY_ANNOTATION
future<detail::result_of_t<ResultFactory()>>
impl(detail::index_sequence<Indices...>, E& exec) const
{
// cast from our shape type to E's shape type
executor_shape_t<E> casted_shape = detail::shape_cast<executor_shape_t<E>>(shape);
return agency::bulk_async_execute(exec, f, casted_shape, result_factory, agency::get<Indices>(shared_factories)...);
}
__agency_exec_check_disable__
template<class E>
__AGENCY_ANNOTATION
future<detail::result_of_t<ResultFactory()>>
operator()(E& exec) const
{
return impl(detail::make_index_sequence<sizeof...(SharedFactories)>(), exec);
}
};
public:
template<class Function, class ResultFactory, class... SharedFactories,
__AGENCY_REQUIRES(execution_depth == sizeof...(SharedFactories))
>
__AGENCY_ANNOTATION
future<detail::result_of_t<ResultFactory()>>
bulk_async_execute(Function f, shape_type shape, ResultFactory result_factory, SharedFactories... shared_factories)
{
auto visitor = bulk_async_execute_visitor<Function,ResultFactory,SharedFactories...>{f, shape, result_factory, agency::make_tuple(shared_factories...)};
return experimental::visit(visitor, variant_);
}
// bulk_sync_execute
private:
template<class Function, class ResultFactory, class... SharedFactories>
struct bulk_sync_execute_visitor
{
Function f;
shape_type shape;
ResultFactory result_factory;
tuple<SharedFactories...> shared_factories;
template<class E, size_t... Indices>
__AGENCY_ANNOTATION
detail::result_of_t<ResultFactory()>
impl(detail::index_sequence<Indices...>, E& exec) const
{
// cast from our shape type to E's shape type
executor_shape_t<E> casted_shape = detail::shape_cast<executor_shape_t<E>>(shape);
return agency::bulk_sync_execute(exec, f, casted_shape, result_factory, agency::get<Indices>(shared_factories)...);
}
template<class E>
__AGENCY_ANNOTATION
detail::result_of_t<ResultFactory()>
operator()(E& exec) const
{
return impl(detail::make_index_sequence<sizeof...(SharedFactories)>(), exec);
}
};
public:
// Synchronously creates a bulk execution on whichever concrete executor the
// variant currently holds, returning the object created by result_factory.
// One shared factory is required per level of execution depth.
template<class Function, class ResultFactory, class... SharedFactories,
__AGENCY_REQUIRES(execution_depth == sizeof...(SharedFactories))
>
__AGENCY_ANNOTATION
detail::result_of_t<ResultFactory()>
bulk_sync_execute(Function f, shape_type shape, ResultFactory result_factory, SharedFactories... shared_factories)
{
// package the arguments into a visitor and dispatch on the active alternative of variant_
auto visitor = bulk_sync_execute_visitor<Function,ResultFactory,SharedFactories...>{f, shape, result_factory, agency::make_tuple(shared_factories...)};
return experimental::visit(visitor, variant_);
}
// bulk_then_execute
private:
// this is a unary visitor that only visits variant_executor
// Unary visitor forwarding bulk_then_execute() to the concrete executor held
// by variant_ when the predecessor is an ordinary (non-variant) future.
// Holds the predecessor by reference; the referenced future must outlive the visit.
template<class Function, class Future, class ResultFactory, class... SharedFactories>
struct bulk_then_execute_visitor1
{
Function f;
shape_type shape;
Future& predecessor_future;
ResultFactory result_factory;
tuple<SharedFactories...> shared_factories;
template<class E, size_t... Indices>
__AGENCY_ANNOTATION
future<detail::result_of_t<ResultFactory()>>
impl(detail::index_sequence<Indices...>, E& exec) const
{
// cast from our shape type to E's shape type
executor_shape_t<E> casted_shape = detail::shape_cast<executor_shape_t<E>>(shape);
return agency::bulk_then_execute(exec, f, casted_shape, predecessor_future, result_factory, agency::get<Indices>(shared_factories)...);
}
// entry point used by experimental::visit(); unpacks the factory tuple
template<class E>
__AGENCY_ANNOTATION
future<detail::result_of_t<ResultFactory()>>
operator()(E& exec) const
{
return impl(detail::make_index_sequence<sizeof...(SharedFactories)>(), exec);
}
};
// this is a binary visitor that visits variant_executor & variant_future simultaneously
// Binary visitor forwarding bulk_then_execute() when the predecessor is a
// variant_future: the executor variant and the future variant are visited
// simultaneously, so the concrete future type arrives as a parameter.
template<class Function, class ResultFactory, class... SharedFactories>
struct bulk_then_execute_visitor2
{
Function f;
shape_type shape;
ResultFactory result_factory;
tuple<SharedFactories...> shared_factories;
__agency_exec_check_disable__
template<class E, class VariantFuture, size_t... Indices>
__AGENCY_ANNOTATION
future<detail::result_of_t<ResultFactory()>>
impl(detail::index_sequence<Indices...>, E& exec, VariantFuture& predecessor_future) const
{
// cast from our shape type to E's shape type
executor_shape_t<E> casted_shape = detail::shape_cast<executor_shape_t<E>>(shape);
return agency::bulk_then_execute(exec, f, casted_shape, predecessor_future, result_factory, agency::get<Indices>(shared_factories)...);
}
// entry point used by experimental::visit(); unpacks the factory tuple
__agency_exec_check_disable__
template<class E, class Future>
__AGENCY_ANNOTATION
future<detail::result_of_t<ResultFactory()>>
operator()(E& exec, Future& predecessor_future) const
{
return impl(detail::make_index_sequence<sizeof...(SharedFactories)>(), exec, predecessor_future);
}
};
public:
// this overload of bulk_then_execute() is for the case when Future is an instance of variant_future
// this overload of bulk_then_execute() is for the case when Future is an instance of variant_future;
// both the executor variant and the future variant are visited together so the
// continuation is created against the concrete executor/future pair
template<class Function, class VariantFuture, class ResultFactory, class... SharedFactories,
__AGENCY_REQUIRES(execution_depth == sizeof...(SharedFactories)),
__AGENCY_REQUIRES(detail::is_variant_future<VariantFuture>::value)
>
__AGENCY_ANNOTATION
future<detail::result_of_t<ResultFactory()>>
bulk_then_execute(Function f, shape_type shape,
VariantFuture& predecessor_future,
ResultFactory result_factory,
SharedFactories... shared_factories)
{
auto visitor = bulk_then_execute_visitor2<Function,ResultFactory,SharedFactories...>{f, shape, result_factory, agency::make_tuple(shared_factories...)};
// obtain the underlying variant of futures so it can be visited alongside variant_
auto future_variant = predecessor_future.variant();
return experimental::visit(visitor, variant_, future_variant);
}
// this overload of bulk_then_execute() is for the case when Future is not an instance of variant_future
// this overload of bulk_then_execute() is for the case when Future is not an instance of variant_future;
// only the executor variant needs to be visited, so the concrete future type is
// carried by reference inside the unary visitor
template<class Function, class Future, class ResultFactory, class... SharedFactories,
__AGENCY_REQUIRES(execution_depth == sizeof...(SharedFactories)),
__AGENCY_REQUIRES(!detail::is_variant_future<Future>::value)
>
__AGENCY_ANNOTATION
future<detail::result_of_t<ResultFactory()>>
bulk_then_execute(Function f, shape_type shape,
Future& predecessor_future,
ResultFactory result_factory,
SharedFactories... shared_factories)
{
auto visitor = bulk_then_execute_visitor1<Function,Future,ResultFactory,SharedFactories...>{f, shape, predecessor_future, result_factory, agency::make_tuple(shared_factories...)};
return experimental::visit(visitor, variant_);
}
private:
// this is a unary visitor that only visits variant_executor
// Unary visitor that casts the referenced future to future<T> using the
// concrete executor alternative held by variant_.
template<class T, class VariantFuture>
struct future_cast_visitor1
{
// future to cast; held by reference, so it must outlive the visit
VariantFuture& fut;
template<class E>
__AGENCY_ANNOTATION
future<T> operator()(E& exec) const
{
return agency::future_cast<T>(exec, fut);
}
};
// this is a binary visitor that visits variant_executor & variant_future simultaneously
// Binary visitor that casts to future<T>, visiting the executor variant and
// the future variant simultaneously so both concrete types arrive as parameters.
template<class T>
struct future_cast_visitor2
{
template<class E, class Future>
__AGENCY_ANNOTATION
future<T> operator()(E& exec, Future& fut) const
{
return agency::future_cast<T>(exec, fut);
}
};
public:
// this overload of future_cast() is for the case when the future to cast is an instance of variant_future
// this overload of future_cast() is for the case when the future to cast is an
// instance of variant_future; the executor and future variants are visited together
template<class T, class VariantFuture,
__AGENCY_REQUIRES(detail::is_variant_future<VariantFuture>::value)
>
__AGENCY_ANNOTATION
future<T> future_cast(VariantFuture& fut)
{
auto visitor = future_cast_visitor2<T>();
// obtain the underlying variant of futures so it can be visited alongside variant_
auto future_variant = fut.variant();
return experimental::visit(visitor, variant_, future_variant);
}
// this overload of future_cast() is for the case when the future to cast is not an instance of variant_future
// this overload of future_cast() is for the case when the future to cast is not
// an instance of variant_future; only the executor variant is visited
template<class T, class Future,
__AGENCY_REQUIRES(!detail::is_variant_future<Future>::value)
>
__AGENCY_ANNOTATION
future<T> future_cast(Future& fut)
{
auto visitor = future_cast_visitor1<T,Future>{fut};
return experimental::visit(visitor, variant_);
}
// make_ready_future
private:
// Visitor that constructs an immediately-ready future<T> from the constructor
// arguments carried in args, using the concrete executor alternative.
// NOTE: Args... may be reference types (the caller forwards them), so the
// tuple may store references into the caller's frame.
template<class T, class... Args>
struct make_ready_future_visitor
{
tuple<Args...> args;
__agency_exec_check_disable__
template<class E, size_t... Indices>
__AGENCY_ANNOTATION
future<T> impl(detail::index_sequence<Indices...>, E& exec) const
{
return agency::make_ready_future<T>(exec, agency::get<Indices>(args)...);
}
// entry point used by experimental::visit(); unpacks the argument tuple
__agency_exec_check_disable__
template<class E>
__AGENCY_ANNOTATION
future<T> operator()(E& exec) const
{
return impl(detail::make_index_sequence<sizeof...(Args)>(), exec);
}
};
public:
// Returns an immediately-ready future<T> constructed from args..., created by
// whichever concrete executor the variant currently holds.
template<class T, class... Args>
__AGENCY_ANNOTATION
future<T> make_ready_future(Args&&... args)
{
// forward the arguments as references into the visitor (no copies are made here)
auto args_tuple = agency::forward_as_tuple(std::forward<Args>(args)...);
auto visitor = make_ready_future_visitor<T,Args&&...>{args_tuple};
return experimental::visit(visitor, variant_);
}
// max_shape_dimensions
private:
// Visitor that queries the concrete executor's maximum shape and converts it
// to this variant executor's shape_type.
struct max_shape_dimensions_visitor
{
template<class E>
__AGENCY_ANNOTATION
shape_type operator()(const E& exec) const
{
return detail::shape_cast<shape_type>(agency::max_shape_dimensions(exec));
}
};
public:
// Returns the maximum shape supported by the currently-held executor,
// expressed in this variant executor's shape_type.
__AGENCY_ANNOTATION
shape_type max_shape_dimensions() const
{
return experimental::visit(max_shape_dimensions_visitor(), variant_);
}
// sync_execute
private:
// Visitor that forwards a single synchronous invocation to the concrete
// executor. FunctionRef is a (possibly rvalue) reference type, preserving the
// caller's value category via std::forward.
template<class FunctionRef>
struct sync_execute_visitor
{
FunctionRef f;
template<class E>
__AGENCY_ANNOTATION
detail::result_of_t<detail::decay_t<FunctionRef>()>
operator()(E& exec) const
{
return agency::sync_execute(exec, std::forward<FunctionRef>(f));
}
};
public:
// Synchronously invokes f() once on whichever concrete executor the variant
// currently holds, returning f's result.
template<class Function>
__AGENCY_ANNOTATION
detail::result_of_t<detail::decay_t<Function>()>
sync_execute(Function&& f)
{
return experimental::visit(sync_execute_visitor<Function&&>{std::forward<Function>(f)}, variant_);
}
// then_execute
private:
// this is a unary visitor that only visits variant_executor
// Unary visitor forwarding then_execute() when the predecessor is an ordinary
// (non-variant) future; the future is held by reference.
template<class FunctionRef, class Future>
struct then_execute_visitor1
{
FunctionRef f;
Future& predecessor_future;
template<class E>
__AGENCY_ANNOTATION
future<detail::result_of_continuation_t<detail::decay_t<FunctionRef>, Future>>
operator()(E& exec) const
{
return agency::then_execute(exec, std::forward<FunctionRef>(f), predecessor_future);
}
};
// this is a binary visitor that visits variant_executor & variant_future simultaneously
// Binary visitor forwarding then_execute() when the predecessor is a
// variant_future; executor and future variants are visited simultaneously.
template<class FunctionRef>
struct then_execute_visitor2
{
FunctionRef f;
template<class E, class Future>
__AGENCY_ANNOTATION
future<detail::result_of_continuation_t<detail::decay_t<FunctionRef>, Future>>
operator()(E& exec, Future& predecessor_future) const
{
return agency::then_execute(exec, std::forward<FunctionRef>(f), predecessor_future);
}
};
public:
// this overload of then_execute() is for the case when the predecessor future is an instance of variant_future
// this overload of then_execute() is for the case when the predecessor future
// is an instance of variant_future; both variants are visited together
template<class Function, class VariantFuture,
__AGENCY_REQUIRES(detail::is_variant_future<VariantFuture>::value)
>
__AGENCY_ANNOTATION
future<detail::result_of_continuation_t<detail::decay_t<Function>, VariantFuture>>
then_execute(Function&& f, VariantFuture& predecessor_future)
{
auto visitor = then_execute_visitor2<Function&&>{std::forward<Function>(f)};
// obtain the underlying variant of futures so it can be visited alongside variant_
auto future_variant = predecessor_future.variant();
return experimental::visit(visitor, variant_, future_variant);
}
// this overload of then_execute() is for the case when the predecessor future is not an instance of variant_future
// this overload of then_execute() is for the case when the predecessor future
// is not an instance of variant_future; only the executor variant is visited
template<class Function, class Future,
__AGENCY_REQUIRES(!detail::is_variant_future<Future>::value)
>
__AGENCY_ANNOTATION
future<detail::result_of_continuation_t<detail::decay_t<Function>, Future>>
then_execute(Function&& f, Future& predecessor_future)
{
auto visitor = then_execute_visitor1<Function&&,Future>{std::forward<Function>(f), predecessor_future};
return experimental::visit(visitor, variant_);
}
// unit_shape
private:
// Visitor that queries the concrete executor's unit shape and converts it to
// this variant executor's shape_type.
struct unit_shape_visitor
{
template<class E>
__AGENCY_ANNOTATION
shape_type operator()(const E& exec) const
{
auto result = agency::unit_shape(exec);
return detail::shape_cast<shape_type>(result);
}
};
public:
// Returns the currently-held executor's unit shape, expressed in this variant
// executor's shape_type.
__AGENCY_ANNOTATION
shape_type unit_shape() const
{
return experimental::visit(unit_shape_visitor(), variant_);
}
private:
variant_type variant_;
};
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/detail/invoke.hpp>
#include <agency/detail/factory.hpp>
#include <agency/detail/type_traits.hpp>
#include <agency/execution/executor/executor_traits.hpp>
#include <agency/execution/executor/detail/utility/bulk_then_execute_without_shared_parameters.hpp>
#include <agency/detail/shape_cast.hpp>
namespace agency
{
// this case handles executors which have .then_execute()
// this case handles executors which have .then_execute(): simply forward the
// call to the executor's native member function
__agency_exec_check_disable__
template<class E, class Function, class Future,
__AGENCY_REQUIRES(detail::ContinuationExecutor<E>())>
__AGENCY_ANNOTATION
executor_future_t<
E,
detail::result_of_continuation_t<detail::decay_t<Function>,Future>
>
then_execute(E& exec, Function&& f, Future& predecessor)
{
return exec.then_execute(std::forward<Function>(f), predecessor);
}
namespace detail
{
// Adapts a nullary/unary continuation f to the signature expected by
// bulk_then_execute: it ignores the agent index, invokes f with the
// predecessor value (if any), and writes f's result (or unit, if f returns
// void) into the result slot.
template<class Function>
struct then_execute_functor
{
// mutable so the stored callable may itself be non-const
mutable Function f;
// this overload of operator() handles the case when there is a non-void predecessor future
template<class Index, class Predecessor, class Result>
__AGENCY_ANNOTATION
void operator()(const Index&, Predecessor& predecessor, Result& result) const
{
result = invoke_and_return_unit_if_void_result(f, predecessor);
}
// this overload of operator() handles the case when there is a void predecessor future
template<class Index, class Result>
__AGENCY_ANNOTATION
void operator()(const Index&, Result& result) const
{
result = invoke_and_return_unit_if_void_result(f);
}
};
} // end detail
// this case handles executors which have .bulk_then_execute() but not .then_execute()
// this case handles executors which have .bulk_then_execute() but not
// .then_execute(): emulate a single-agent continuation by launching a bulk
// continuation of exactly one agent and casting the resulting future
__agency_exec_check_disable__
template<class E, class Function, class Future,
__AGENCY_REQUIRES(!detail::ContinuationExecutor<E>()),
__AGENCY_REQUIRES(detail::BulkContinuationExecutor<E>())>
__AGENCY_ANNOTATION
executor_future_t<
E,
detail::result_of_continuation_t<detail::decay_t<Function>,Future>
>
then_execute(E& exec, Function f, Future& predecessor)
{
using result_of_function = detail::result_of_continuation_t<Function,Future>;
// if f returns void, then return a unit from bulk_then_execute()
using result_type = typename std::conditional<
std::is_void<result_of_function>::value,
detail::unit,
result_of_function
>::type;
// XXX should really move f into this functor, but it's not clear how to make move-only
// parameters to CUDA kernels
auto execute_me = detail::then_execute_functor<Function>{f};
using shape_type = executor_shape_t<E>;
// call bulk_then_execute_without_shared_parameters() to get an intermediate future
auto intermediate_future = detail::bulk_then_execute_without_shared_parameters(
exec, // the executor
execute_me, // the functor to execute
detail::shape_cast<shape_type>(1), // create only a single agent
predecessor, // the incoming argument to f
detail::construct<result_type>() // a factory for creating f's result
);
// cast the intermediate future into the right type of future for the result
// (this maps future<unit> back to future<void> when f returns void)
return future_traits<decltype(intermediate_future)>::template cast<result_of_function>(intermediate_future);
}
// XXX introduce a case to handle executors which only have .async_execute() ?
// XXX introduce a worst case which uses predecessor.then() and ignores the executor entirely?
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/execution/execution_categories.hpp>
#include <agency/execution/execution_agent/execution_agent_traits.hpp>
#include <agency/tuple.hpp>
#include <agency/detail/type_list.hpp>
#include <agency/detail/control_structures/agent_shared_parameter_factory.hpp>
#include <agency/detail/scoped_in_place_type.hpp>
namespace agency
{
namespace detail
{
// Metafunction mapping a type_list of execution agents (plus their barrier
// types) to a tuple of the corresponding shared-parameter factory types.
template<class AgentList, class... Barriers>
struct tuple_of_agent_shared_parameter_factories_impl;
// specialization that unpacks the type_list into the factory tuple
template<class... ExecutionAgents, class... Barriers>
struct tuple_of_agent_shared_parameter_factories_impl<agency::detail::type_list<ExecutionAgents...>, Barriers...>
{
using type = agency::tuple<
agent_shared_parameter_factory<ExecutionAgents,Barriers>...
>;
};
// Convenience front-end: expands ExecutionAgent into its full agent type list
// (outer agent plus nested inner agents) before applying the impl metafunction.
template<class ExecutionAgent, class... Barriers>
struct tuple_of_agent_shared_parameter_factories : tuple_of_agent_shared_parameter_factories_impl<
typename agency::detail::execution_agent_type_list<ExecutionAgent>::type,
Barriers...
>
{};
// alias for the resulting tuple-of-factories type
template<class ExecutionAgent, class... Barriers>
using tuple_of_agent_shared_parameter_factories_t = typename tuple_of_agent_shared_parameter_factories<ExecutionAgent, Barriers...>::type;
// this is the terminal case for flat agents -- agents which have no ::inner_execution_agent_type
template<class ExecutionAgent,
class BarrierOrVoid,
__AGENCY_REQUIRES(
!agency::detail::has_inner_execution_agent_type<ExecutionAgent>::value
)>
__AGENCY_ANNOTATION
tuple_of_agent_shared_parameter_factories_t<ExecutionAgent, BarrierOrVoid>
make_tuple_of_agent_shared_parameter_factories(const typename agency::execution_agent_traits<ExecutionAgent>::param_type& param,
scoped_in_place_type_t<BarrierOrVoid> barrier)
{
auto factory = make_agent_shared_parameter_factory<ExecutionAgent>(param, barrier.outer());
return agency::make_tuple(factory);
}
// this is the recursive case for scoped agents -- agents which do have an ::inner_execution_agent_type
template<class ExecutionAgent,
class BarrierOrVoid, class... BarriersOrVoids,
__AGENCY_REQUIRES(
1 + sizeof...(BarriersOrVoids) == agency::detail::execution_depth<typename agency::execution_agent_traits<ExecutionAgent>::execution_category>::value
),
__AGENCY_REQUIRES(
agency::detail::has_inner_execution_agent_type<ExecutionAgent>::value
)>
__AGENCY_ANNOTATION
tuple_of_agent_shared_parameter_factories_t<ExecutionAgent, BarrierOrVoid, BarriersOrVoids...>
make_tuple_of_agent_shared_parameter_factories(const typename agency::execution_agent_traits<ExecutionAgent>::param_type& param,
scoped_in_place_type_t<BarrierOrVoid,BarriersOrVoids...> barriers)
{
using inner_execution_agent_type = typename ExecutionAgent::inner_execution_agent_type;
// recurse to get the tail of the tuple
auto inner_factories = make_tuple_of_agent_shared_parameter_factories<inner_execution_agent_type>(param.inner(), barriers.inner());
// create the head of the tuple
auto outer_factory = make_agent_shared_parameter_factory<ExecutionAgent>(param, barriers.outer());
// return a tuple of all the factories by prepending the outer_factory
return agency::tuple_cat(agency::make_tuple(outer_factory), inner_factories);
}
} // end detail
} // end agency
<file_sep>#include <iostream>
#include <type_traits>
#include <vector>
#include <cassert>
#include <agency/execution/executor/scoped_executor.hpp>
#include <agency/execution/executor/executor_traits.hpp>
#include <agency/execution/executor/customization_points.hpp>
#include <agency/tuple.hpp>
#include "test_executors.hpp"
// Exercises scoped_executor<OuterExecutor,InnerExecutor>: first checks its
// trait surface at compile time (category, shape/index/future types, depth),
// then runs a 10x10 bulk_then_execute and verifies every agent observed the
// predecessor value plus the outer and inner shared arguments.
template<class OuterExecutor, class InnerExecutor>
void test(OuterExecutor outer_exec, InnerExecutor inner_exec)
{
using namespace agency;
using scoped_executor_type = scoped_executor<OuterExecutor,InnerExecutor>;
static_assert(is_bulk_continuation_executor<scoped_executor_type>::value,
"scoped_executor should be a bulk continuation executor");
using expected_category = scoped_execution_tag<executor_execution_category_t<OuterExecutor>, executor_execution_category_t<InnerExecutor>>;
static_assert(detail::is_detected_exact<expected_category, executor_execution_category_t, scoped_executor_type>::value,
"scoped_executor should have expected_category execution_category");
static_assert(detail::is_detected_exact<tuple<size_t,size_t>, executor_shape_t, scoped_executor_type>::value,
"scoped_executor should have detail::tuple<size_t,size_t> shape_type");
static_assert(detail::is_detected_exact<detail::index_tuple<size_t,size_t>, executor_index_t, scoped_executor_type>::value,
"scoped_executor should have detail::index_tuple<size_t,size_t> index_type");
static_assert(detail::is_detected_exact<executor_future_t<OuterExecutor,int>, executor_future_t, scoped_executor_type, int>::value,
"scoped_executor should have the same future type as OuterExecutor");
const size_t outer_depth = executor_execution_depth<OuterExecutor>::value;
const size_t inner_depth = executor_execution_depth<InnerExecutor>::value;
static_assert(executor_execution_depth<scoped_executor_type>::value == outer_depth + inner_depth,
"scoped_executor should have execution_depth == outer_depth + inner_depth");
scoped_executor_type exec(outer_exec,inner_exec);
// predecessor future carrying the value 7
std::future<int> fut = make_ready_future<int>(exec, 7);
using shape_type = executor_shape_t<scoped_executor_type>;
shape_type shape(10,10);
using index_type = executor_index_t<scoped_executor_type>;
auto f = exec.bulk_then_execute(
// each agent writes predecessor + outer_shared + inner_shared into its slot
[=](index_type idx, int& past_arg, std::vector<int>& results, std::vector<int>& outer_shared_arg, std::vector<int>& inner_shared_arg)
{
// linearize the 2D index into a rank within the flat results vector
auto rank = agency::get<0>(idx) * agency::get<1>(shape) + agency::get<1>(idx);
auto outer_idx = agency::get<0>(idx);
auto inner_idx = agency::get<1>(idx);
results[rank] = past_arg + outer_shared_arg[outer_idx] + inner_shared_arg[inner_idx];
},
shape,
fut,
[=]{ return std::vector<int>(detail::shape_cast<int>(shape)); }, // results
[=]{ return std::vector<int>(agency::get<0>(shape), 13); }, // outer_shared_arg
[=]{ return std::vector<int>(agency::get<1>(shape), 42); } // inner_shared_arg
);
auto result = f.get();
// every one of the 100 agents should have produced 7 + 13 + 42
assert(std::vector<int>(10 * 10, 7 + 13 + 42) == result);
}
int main()
{
// only the continuation/continuation pairing is currently exercised;
// the remaining executor pairings are disabled below
test(bulk_continuation_executor(), bulk_continuation_executor());
//test(bulk_continuation_executor(), bulk_synchronous_executor());
//test(bulk_continuation_executor(), bulk_asynchronous_executor());
//test(bulk_synchronous_executor(), bulk_continuation_executor());
//test(bulk_synchronous_executor(), bulk_synchronous_executor());
//test(bulk_synchronous_executor(), bulk_asynchronous_executor());
//test(bulk_asynchronous_executor(), bulk_continuation_executor());
//test(bulk_asynchronous_executor(), bulk_synchronous_executor());
//test(bulk_asynchronous_executor(), bulk_asynchronous_executor());
std::cout << "OK" << std::endl;
return 0;
}
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/execution/execution_agent/detail/basic_execution_agent.hpp>
#include <agency/execution/execution_categories.hpp>
#include <agency/coordinate/point.hpp>
namespace agency
{
// execution agents with sequenced ordering; the default index type is 1D,
// while the _2d alias uses a size2 index
using sequenced_agent = detail::basic_execution_agent<sequenced_execution_tag>;
using sequenced_agent_1d = sequenced_agent;
using sequenced_agent_2d = detail::basic_execution_agent<sequenced_execution_tag, size2>;
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
namespace agency
{
namespace cuda
{
namespace detail
{
// Barrier for the threads of a single CUDA thread block. On the device it is
// backed by __syncthreads(); on the host it only records the expected count
// and must never be waited on (arrive_and_wait() asserts).
class block_barrier
{
public:
// the count is only stored on the host; on the device the block dimensions
// implicitly determine it
__AGENCY_ANNOTATION
block_barrier(int count)
#ifndef __CUDA_ARCH__
: count_(count)
#endif
{}
// non-movable (and therefore non-copyable)
__AGENCY_ANNOTATION
block_barrier(block_barrier&&) = delete;
// number of threads participating in the barrier
__AGENCY_ANNOTATION
int count() const
{
#ifndef __CUDA_ARCH__
return count_;
#else
return blockDim.x * blockDim.y * blockDim.z;
#endif
}
// blocks until all threads of the block have arrived; device-only --
// calling this on the host is a programming error
__AGENCY_ANNOTATION
void arrive_and_wait()
{
#ifdef __CUDA_ARCH__
__syncthreads();
#else
assert(0);
#endif
}
private:
#ifndef __CUDA_ARCH__
int count_;
#endif
};
} // end detail
} // end cuda
} // end agency
<file_sep>#include <iostream>
#include <type_traits>
#include <vector>
#include <cassert>
#include <agency/execution/executor/concurrent_executor.hpp>
#include <agency/execution/executor/executor_traits.hpp>
#include <agency/execution/executor/customization_points.hpp>
// Exercises concurrent_executor: checks its trait surface at compile time,
// then runs a 10-agent bulk_then_execute and verifies each agent observed the
// predecessor value plus the shared argument.
int main()
{
using namespace agency;
static_assert(is_bulk_continuation_executor<concurrent_executor>::value,
"concurrent_executor should be a bulk continuation executor");
static_assert(is_bulk_executor<concurrent_executor>::value,
"concurrent_executor should be a bulk executor");
static_assert(detail::is_detected_exact<concurrent_execution_tag, executor_execution_category_t, concurrent_executor>::value,
"concurrent_executor should have concurrent_execution_tag execution_category");
static_assert(detail::is_detected_exact<size_t, executor_shape_t, concurrent_executor>::value,
"concurrent_executor should have size_t shape_type");
static_assert(detail::is_detected_exact<size_t, executor_index_t, concurrent_executor>::value,
"concurrent_executor should have size_t index_type");
static_assert(detail::is_detected_exact<std::future<int>, executor_future_t, concurrent_executor, int>::value,
"concurrent_executor should have std::future future");
static_assert(executor_execution_depth<concurrent_executor>::value == 1,
"concurrent_executor should have execution_depth == 1");
concurrent_executor exec;
// predecessor future carrying the value 7
std::future<int> fut = agency::make_ready_future<int>(exec, 7);
size_t shape = 10;
auto f = exec.bulk_then_execute(
// each agent writes predecessor + shared into its slot of the results vector
[](size_t idx, int& past_arg, std::vector<int>& results, std::vector<int>& shared_arg)
{
results[idx] = past_arg + shared_arg[idx];
},
shape,
fut,
[=]{ return std::vector<int>(shape); }, // results
[=]{ return std::vector<int>(shape, 13); } // shared_arg
);
auto result = f.get();
// every one of the 10 agents should have produced 7 + 13
assert(std::vector<int>(10, 7 + 13) == result);
std::cout << "OK" << std::endl;
return 0;
}
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/cuda/detail/feature_test.hpp>
#include <agency/cuda/detail/terminate.hpp>
#include <agency/cuda/device.hpp>
#include <agency/detail/utility.hpp>
#include <memory>
namespace agency
{
namespace cuda
{
namespace detail
{
// Move-only RAII wrapper around a cudaStream_t that remembers the device the
// stream was created on. Destruction destroys the stream (reporting, but not
// throwing, on error); release() transfers ownership of the raw handle.
class stream
{
public:
// creates a new stream associated with the given device
inline __host__ __device__
stream(device_id d)
: device_(d)
{
#if __cuda_lib_has_cudart
// temporarily switch to our device while creating the stream
scoped_current_device scope(device());
detail::throw_on_error(cudaStreamCreateWithFlags(&s_, cudaStreamNonBlocking), "cudaStreamCreateWithFlags in cuda::detail::stream ctor");
#else
detail::terminate_with_message("cuda::detail::stream ctor requires CUDART");
#endif
}
// creates a new stream associated with the current device
// XXX do we actually want to make this depend on the current state of the CUDA runtime?
// XXX it might be a better idea to associate the stream with device 0
// XXX alternatively, maybe it would be better if a default-constructed stream was not associated with a device
inline __host__ __device__
stream()
: stream(current_device())
{}
// move construction steals other's handle, leaving other invalid
// (other still reports its original device)
inline __host__ __device__
stream(stream&& other)
: device_(other.device())
{
s_ = other.release();
}
// destroys the underlying stream if this object still owns one
inline __host__ __device__
~stream()
{
#if __cuda_lib_has_cudart
if(valid())
{
// avoid propagating an exception but report the error if one exists
detail::print_error_message_if(cudaStreamDestroy(release()), "cudaStreamDestroy in cuda::detail::stream dtor")
;
}
#else
detail::terminate_with_message("cuda::detail::stream dtor requires CUDART");
#endif
}
// the device this stream was created on
inline __host__ __device__
device_id device() const
{
return device_;
}
// returns the underlying cudaStream_t
inline __host__ __device__
cudaStream_t native_handle() const
{
return s_;
}
// releases ownership of the underlying cudaStream_t and invalidates this stream
inline __host__ __device__
cudaStream_t release()
{
cudaStream_t result = 0;
agency::detail::adl_swap(result, s_);
return result;
}
// true if this object still owns a stream (handle is non-null)
inline __host__ __device__
bool valid() const
{
return native_handle() != 0;
}
// exchanges device and handle with other
inline __host__ __device__
void swap(stream& other)
{
device_id tmp1 = device_;
device_ = other.device_;
other.device_ = tmp1;
cudaStream_t tmp2 = s_;
s_ = other.s_;
other.s_ = tmp2;
}
// makes future work submitted to this stream wait on event e
inline __host__ __device__
void wait_on(cudaEvent_t e)
{
#if __cuda_lib_has_cudart
// make the next launch wait on the event
throw_on_error(cudaStreamWaitEvent(native_handle(), e, 0), "cuda::detail::stream::wait_on(cudaEvent_t): cudaStreamWaitEvent()");
#else
throw_on_error(cudaErrorNotSupported, "cuda::detail::stream::wait_on(cudaEvent_t): cudaStreamWaitEvent() requires CUDART");
#endif
}
// makes future work submitted to this stream wait on all work currently in s
inline __host__ __device__
void wait_on(cudaStream_t s)
{
#if __cuda_lib_has_cudart
// record an event on s
cudaEvent_t e;
throw_on_error(cudaEventCreate(&e), "cuda::detail::stream::wait_on(cudaStream_t): cudaEventCreate()");
throw_on_error(cudaEventRecord(e, s), "cuda::detail::stream::wait_on(cudaStream_t): cudaEventRecord()");
// wait on the event
wait_on(e);
// destroy the event
throw_on_error(cudaEventDestroy(e), "cuda::detail::stream::wait_on(cudaStream_t): cudaEventDestroy()");
#else
throw_on_error(cudaErrorNotSupported, "cuda::detail::stream::wait_on(cudaStream_t): requires CUDART");
#endif
}
private:
// trampoline passed to cudaStreamAddCallback(); reclaims ownership of the
// heap-allocated std::function and invokes it
static void callback(cudaStream_t, cudaError_t, void* user_data)
{
// XXX should maybe look at the CUDA error
// convert user_data into a pointer to std::function and immediately put it inside unique_ptr
std::unique_ptr<std::function<void()>> f_ptr(reinterpret_cast<std::function<void()>*>(user_data));
// call f
(*f_ptr)();
}
public:
// schedules f to run on the host after all work currently in the stream completes
template<class Function>
void add_callback(Function f)
{
// make a copy of f and put it inside a std::unique_ptr to std::function
std::unique_ptr<std::function<void()>> ptr_to_fun(new std::function<void()>(f));
// release the unique_ptr's pointer into cudaStreamAddCallback()
detail::throw_on_error(cudaStreamAddCallback(native_handle(), callback, ptr_to_fun.release(), 0), "cudaStreamAddCallback in cuda::detail::stream::add_callback()");
}
private:
device_id device_;
cudaStream_t s_;
};
} // end detail
} // end cuda
} // end agency
<file_sep>/// \file
/// \brief Include this file to use bulk_then().
///
#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/control_structures/bulk_then_execution_policy.hpp>
#include <agency/detail/type_traits.hpp>
#include <agency/detail/integer_sequence.hpp>
#include <agency/execution/execution_agent.hpp>
namespace agency
{
namespace detail
{
// SFINAE helper: exposes a ::type (the result of bulk_then for this policy /
// function / future / argument combination) only when bulk_then is actually
// invocable with those types; otherwise there is no ::type and overload
// resolution discards bulk_then.
template<bool enable, class ExecutionPolicy, class Function, class Future, class... Args>
struct enable_if_bulk_then_execution_policy_impl {};
// enabled case: forward the computed result type
template<class ExecutionPolicy, class Function, class Future, class... Args>
struct enable_if_bulk_then_execution_policy_impl<true, ExecutionPolicy, Function, Future, Args...>
{
using type = bulk_then_execution_policy_result_t<ExecutionPolicy,Function,Future,Args...>;
};
// front-end that evaluates the feasibility test and decays the policy type
template<class ExecutionPolicy, class Function, class Future, class... Args>
struct enable_if_bulk_then_execution_policy
: enable_if_bulk_then_execution_policy_impl<
is_bulk_then_possible_via_execution_policy<decay_t<ExecutionPolicy>,Function,Future,Args...>::value,
decay_t<ExecutionPolicy>,
Function,
Future,
Args...
>
{};
} // end detail
/// \brief Creates a bulk continuation.
/// \ingroup control_structures
///
///
/// `bulk_then` is a control structure which asynchronously creates a group of function invocations with forward progress ordering as required by an execution policy.
/// These invocations are a *bulk continuation* to a predecessor bulk asynchronous invocation. The predecessor bulk asynchronous invocation is represented by a
/// future object, and the continuation will not execute until the predecessor's future object becomes ready.
/// The results of the continuation's invocations, if any, are collected into a container and returned as `bulk_then`'s asynchronous result.
/// A future object corresponding to the eventual availability of this container is returned as `bulk_then`'s result.
///
/// `bulk_then` asynchronously creates a group of function invocations of size `N`, and each invocation i in `[0,N)` has the following form:
///
/// result_i = f(agent_i, predecessor_arg, arg_i_1, arg_i_2, ..., arg_i_M)
///
/// `agent_i` is a reference to an **execution agent** which identifies the ith invocation within the group.
/// The parameter `predecessor_arg` is a reference to the value of the future object used as a parameter to `bulk_then`. If this future object has a `void` value, then this parameter is omitted.
/// The parameter `arg_i_j` depends on the `M` arguments `arg_j` passed to `bulk_then`:
/// * If `arg_j` is a **shared parameter**, then it is a reference to an object shared among all execution agents in `agent_i`'s group.
/// * Otherwise, `arg_i_j` is a copy of argument `arg_j`.
///
/// If the invocations of `f` do not return `void`, these results are collected and returned in a container `results`, whose type is implementation-defined.
/// If invocation i returns `result_i`, and this invocation's `agent_i` has index `idx_i`, then `results[idx_i]` yields `result_i`.
///
/// \param policy An execution policy describing the requirements of the execution agents created by this call to `bulk_then`.
/// \param f A function defining the work to be performed by execution agents.
/// \param predecessor A future object representing the predecessor task. Its future value, if it has one, is passed to `f` as an argument when `f` is invoked.
/// After `bulk_then` returns, `predecessor` is invalid if it is not a shared future.
/// \param args Additional arguments to pass to `f` when it is invoked.
/// \return A future object corresponding to the eventual completion of the continuation: a future to `void`, if `f` has no result; otherwise, a future to a container of `f`'s results indexed by the execution agent which produced them.
///
/// \tparam ExecutionPolicy This type must fulfill the requirements of `ExecutionPolicy`.
/// \tparam Function `Function`'s first parameter type must be `ExecutionPolicy::execution_agent_type&`.
/// The types of its additional parameters must match `Args...`.
/// \tparam Future This type must fulfill the requirements of `Future`. If the value type of this `Future` is not `void`, this type
/// must match the type of the second parameter of `Function`.
/// \tparam Args Each type in `Args...` must match the type of the corresponding parameter of `Function`.
///
/// The following example demonstrates how to use `bulk_then` to sequence a continuation after a predecessor task:
///
/// \include hello_then.cpp
///
/// Messages from agents in the predecessor task are guaranteed to be output before messages from the continuation:
///
/// ~~~~
/// $ clang -std=c++11 -I. -lstdc++ -pthread examples/hello_then.cpp -o hello_then
/// $ ./hello_then
/// Starting predecessor and continuation tasks asynchronously...
/// Sleeping before waiting on the continuation...
/// Hello, world from agent 0 in the predecessor task
/// Hello, world from agent 1 in the predecessor task
/// Hello, world from agent 2 in the predecessor task
/// Hello, world from agent 3 in the predecessor task
/// Hello, world from agent 4 in the predecessor task
/// Hello, world from agent 0 in the continuation
/// Hello, world from agent 1 in the continuation
/// Hello, world from agent 2 in the continuation
/// Hello, world from agent 3 in the continuation
/// Hello, world from agent 4 in the continuation
/// Woke up, waiting for the continuation to complete...
/// OK
/// ~~~~
///
/// \see bulk_invoke
/// \see bulk_async
// see the Doxygen block above for the full contract of bulk_then
template<class ExecutionPolicy, class Function, class Future, class... Args>
__AGENCY_ANNOTATION
#ifndef DOXYGEN_SHOULD_SKIP_THIS
typename detail::enable_if_bulk_then_execution_policy<
ExecutionPolicy, Function, Future, Args...
>::type
#else
see_below
#endif
bulk_then(ExecutionPolicy&& policy, Function f, Future& predecessor, Args&&... args)
{
using agent_traits = execution_agent_traits<typename std::decay<ExecutionPolicy>::type::execution_agent_type>;
// one shared parameter is created per level of the agent's execution hierarchy
const size_t num_shared_params_for_agent = detail::execution_depth<typename agent_traits::execution_category>::value;
// dispatch to the implementation with index sequences enumerating the
// user arguments and the shared parameters
return detail::bulk_then_execution_policy(
detail::index_sequence_for<Args...>(),
detail::make_index_sequence<num_shared_params_for_agent>(),
policy,
f,
predecessor,
std::forward<Args>(args)...
);
}
} // end agency
<file_sep>#pragma once
#include <future>
#include <agency/execution/execution_categories.hpp>
#include <agency/future.hpp>
#include <utility>
namespace agency
{
namespace this_thread
{
// A single-thread executor which encourages the compiler to vectorize the
// loop over execution agents with an "ignore vector dependencies" pragma.
class vector_executor
{
  public:
    // agents created by this executor have no ordering guarantees among them,
    // which is what licenses vectorizing the loop below
    using execution_category = unsequenced_execution_tag;

    // Invokes f(i, result, shared_parm) for each i in [0, n) inside a loop
    // annotated for vectorization.
    //
    //   f              - the function to invoke once per agent index
    //   n              - the number of agents to create
    //   result_factory - creates the object f receives as its result parameter
    //   shared_factory - creates the object f receives as its shared parameter
    //
    // Returns the object created by result_factory after all invocations of f.
    template<class Function, class ResultFactory, class SharedFactory>
    agency::detail::result_of_t<ResultFactory()>
    bulk_sync_execute(Function f, size_t n, ResultFactory result_factory, SharedFactory shared_factory)
    {
      auto result = result_factory();
      auto shared_parm = shared_factory();

      // ivdep requires gcc 4.9+
      // NOTE: the previous test ((__GNUC__ >= 4) && (__GNUC_MINOR__ >= 9)) wrongly
      //       rejected gcc 5.0 through 8.x (major >= 5 but minor < 9); compare the
      //       full (major, minor) version instead
      #if !defined(__INTEL_COMPILER) && !defined(__NVCC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 9))
      #pragma GCC ivdep
      #elif defined(__INTEL_COMPILER)
      #pragma simd
      #endif
      for(size_t i = 0; i < n; ++i)
      {
        f(i, result, shared_parm);
      }

      // return the local by value; it is moved (or elided) implicitly, and
      // avoiding an explicit std::move keeps NRVO available
      return result;
    }
};
} // end this_thread
// XXX consider a flattened nesting similar to parallel_executor
using vector_executor = this_thread::vector_executor;
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/detail/invoke.hpp>
#include <agency/detail/type_traits.hpp>
#include <agency/execution/executor/detail/utility/bulk_async_execute_with_one_shared_parameter.hpp>
#include <agency/execution/executor/executor_traits.hpp>
#include <agency/detail/shape_cast.hpp>
namespace agency
{
// this case handles executors which have .async_execute()
// single-agent async_execute() customization point:
// this case handles executors which provide a native .async_execute()
__agency_exec_check_disable__
template<class E, class Function,
         __AGENCY_REQUIRES(detail::AsynchronousExecutor<E>())
        >
__AGENCY_ANNOTATION
executor_future_t<
  E,
  detail::result_of_t<detail::decay_t<Function>()>
>
async_execute(E& exec, Function&& f)
{
  // simply defer to the executor's member function
  return exec.async_execute(std::forward<Function>(f));
}
// this case handles executors which have .then_execute() but not .async_execute()
// XXX not really clear if we should prefer .bulk_async_execute() to calling then_execute()
// XXX one advantage of prioritizing an implementation using .then_execute() over .bulk_async_execute() is
// that no intermediate future is involved
// XXX also, there's no weirdness involving move-only functions which .bulk_async_execute() would have trouble with
__agency_exec_check_disable__
template<class E, class Function,
         __AGENCY_REQUIRES(!detail::AsynchronousExecutor<E>()),
         __AGENCY_REQUIRES(detail::ContinuationExecutor<E>())
        >
__AGENCY_ANNOTATION
executor_future_t<
  E,
  detail::result_of_t<detail::decay_t<Function>()>
>
async_execute(E& exec, Function&& f)
{
  using void_future_type = executor_future_t<E,void>;
  // synthesize asynchrony by scheduling f as the continuation of an
  // immediately-ready void future
  // XXX should really allow the executor to participate here
  void_future_type ready_predecessor = future_traits<void_future_type>::make_ready();
  return exec.then_execute(std::forward<Function>(f), ready_predecessor);
}
namespace detail
{
// adapts a single-agent callable to the signature bulk execution expects:
// the agent index is ignored, the shared parameter carries the callable,
// and its result is written through the result parameter
struct async_execute_functor
{
  template<class Index, class Result, class SharedFunction>
  __AGENCY_ANNOTATION
  void operator()(const Index&, Result& result, SharedFunction& shared_function) const
  {
    // invoke_and_return_unit_if_void_result() maps a void result onto unit
    // so the assignment below is well-formed unconditionally
    result = invoke_and_return_unit_if_void_result(shared_function);
  }
};
} // end detail
// this case handles executors which have no way to create single-agent asynchrony
// this case handles executors which have no way to create single-agent
// asynchrony: emulate it with a single-agent bulk_async_execute()
__agency_exec_check_disable__
template<class E, class Function,
         __AGENCY_REQUIRES(!detail::AsynchronousExecutor<E>()),
         __AGENCY_REQUIRES(!detail::ContinuationExecutor<E>()),
         __AGENCY_REQUIRES(detail::BulkExecutor<E>())>
__AGENCY_ANNOTATION
executor_future_t<
  E,
  detail::result_of_t<detail::decay_t<Function>()>
>
async_execute(E& exec, Function&& f)
{
  using result_of_function = detail::result_of_t<Function()>;
  // if f returns void, then return a unit from bulk_async_execute()
  using result_type = typename std::conditional<
    std::is_void<result_of_function>::value,
    detail::unit,
    result_of_function
  >::type;
  using shape_type = executor_shape_t<E>;
  auto intermediate_future = agency::detail::bulk_async_execute_with_one_shared_parameter(
    exec,                                                  // the executor
    detail::async_execute_functor(),                       // the functor to execute
    detail::shape_cast<shape_type>(1),                     // create only a single agent
    detail::construct<result_type>(),                      // a factory for creating f's result
    detail::make_moving_factory(std::forward<Function>(f)) // a factory to present f as the one shared parameter
  );
  // cast the intermediate future into the right type of future for the result
  // (this also converts future<unit> back to future<void> when f returns void)
  return future_traits<decltype(intermediate_future)>::template cast<result_of_function>(intermediate_future);
}
} // end agency
<file_sep>#include <iostream>
#include <cassert>
#include <agency/execution/executor/customization_points/unit_shape.hpp>
#include "../test_executors.hpp"
template<class Executor>
void test(Executor exec)
{
using shape_type = agency::executor_shape_t<Executor>;
shape_type unit = agency::unit_shape(exec);
assert(unit == shape_type(1));
}
int main()
{
  // exercise unit_shape() across every flavor of test executor:
  // bulk/non-bulk, synchronous, asynchronous, and continuation
  test(bulk_synchronous_executor());
  test(bulk_asynchronous_executor());
  test(bulk_continuation_executor());
  test(not_a_bulk_synchronous_executor());
  test(not_a_bulk_asynchronous_executor());
  test(not_a_bulk_continuation_executor());
  test(complete_bulk_executor());
  std::cout << "OK" << std::endl;
  return 0;
}
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/memory/allocator/detail/allocator_traits.hpp>
#include <agency/memory/allocator/detail/allocator_traits/check_for_member_functions.hpp>
#include <agency/detail/iterator/forwarding_iterator.hpp>
#include <memory>
namespace agency
{
namespace detail
{
namespace allocator_traits_detail
{
// case 1: the allocator has a member .construct(p, args...); defer to it
__agency_exec_check_disable__
template<class Alloc, class T, class... Args>
__AGENCY_ANNOTATION
typename std::enable_if<
  has_construct<Alloc,T*,Args...>::value
>::type
construct(Alloc& a, T* p, Args&&... args)
{
  a.construct(p, std::forward<Args>(args)...);
} // end construct()
// case 2: no member .construct(); placement-new the object directly,
// mirroring what std::allocator_traits::construct does by default
__agency_exec_check_disable__
template<class Alloc, class T, class... Args>
__AGENCY_ANNOTATION
typename std::enable_if<
  !has_construct<Alloc,T*,Args...>::value
>::type
construct(Alloc&, T* p, Args&&... args)
{
  ::new(p) T(std::forward<Args>(args)...);
} // end construct()
} // end allocator_traits_detail
// allocator_traits::construct() dispatches to whichever of the two
// implementations above applies to Alloc
template<class Alloc>
template<class T, class... Args>
__AGENCY_ANNOTATION
void allocator_traits<Alloc>
  ::construct(Alloc& alloc, T* p, Args&&... args)
{
  allocator_traits_detail::construct(alloc, p, std::forward<Args>(args)...);
} // end allocator_traits::construct()
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/memory/allocator/detail/allocator_traits.hpp>
#include <agency/memory/allocator/detail/allocator_traits/check_for_member_functions.hpp>
#include <memory>
namespace agency
{
namespace detail
{
namespace allocator_traits_detail
{
// case 1: the allocator has a member .destroy(p); defer to it
__agency_exec_check_disable__
template<class Alloc, class T>
__AGENCY_ANNOTATION
typename std::enable_if<
  has_destroy<Alloc,T*>::value
>::type
destroy(Alloc& a, T* p)
{
  a.destroy(p);
} // end destroy()
// case 2: no member .destroy(); invoke the destructor directly,
// mirroring what std::allocator_traits::destroy does by default
__agency_exec_check_disable__
template<class Alloc, class T>
__AGENCY_ANNOTATION
typename std::enable_if<
  !has_destroy<Alloc,T*>::value
>::type
destroy(Alloc&, T* p)
{
  p->~T();
} // end destroy()
} // end allocator_traits_detail
// allocator_traits::destroy() dispatches to whichever of the two
// implementations above applies to Alloc
template<class Alloc>
template<class T>
__AGENCY_ANNOTATION
void allocator_traits<Alloc>
  ::destroy(Alloc& alloc, T* p)
{
  allocator_traits_detail::destroy(alloc, p);
} // end allocator_traits::destroy()
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/execution/execution_policy.hpp>
#include <agency/detail/algorithm/construct_n.hpp>
#include <utility>
namespace agency
{
namespace detail
{
// uninitialized_copy_n: copy-constructs n elements into the raw storage
// beginning at result from the range beginning at first, creating the work
// with the given execution policy. Returns the end of the output range.
template<class ExecutionPolicy, class Iterator1, class Size, class Iterator2,
         __AGENCY_REQUIRES(is_execution_policy<typename std::decay<ExecutionPolicy>::type>::value)>
__AGENCY_ANNOTATION
Iterator2 uninitialized_copy_n(ExecutionPolicy&& policy, Iterator1 first, Size n, Iterator2 result)
{
  // construct_n() copy-constructs result[i] from first[i]
  return detail::construct_n(std::forward<ExecutionPolicy>(policy), result, n, first);
}
// sequential overload for callers which do not supply an execution policy
template<class Iterator1, class Size, class Iterator2>
__AGENCY_ANNOTATION
Iterator2 uninitialized_copy_n(Iterator1 first, Size n, Iterator2 result)
{
  // pass this instead of agency::seq to work around the prohibition on
  // taking the address of a global constexpr object (i.e., agency::seq) from a CUDA __device__ function
  sequenced_execution_policy seq;
  return detail::uninitialized_copy_n(seq, first, n, result);
}
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/experimental/ranges/chunk.hpp>
namespace agency
{
namespace experimental
{
// tile() is a synonym for chunk()
// introduce this while we bikeshed the names
// returns a lazy view of rng partitioned into tiles of tile_size elements
template<class Range, class Difference>
__AGENCY_ANNOTATION
auto tile(Range&& rng, Difference tile_size) ->
  decltype(
    agency::experimental::chunk(std::forward<Range>(rng), tile_size)
  )
{
  // tile() simply forwards to chunk(); see the note above
  return agency::experimental::chunk(std::forward<Range>(rng), tile_size);
}
// tile_evenly() is a synonym for chunk_evenly()
// introduce this while we bikeshed the names
// divides rng into number_of_chunks tiles of roughly equal size; the
// rounded-up quotient means only the final tile may be short
template<class Range, class Difference>
__AGENCY_ANNOTATION
auto tile_evenly(Range&& rng, Difference number_of_chunks) ->
  decltype(
    agency::experimental::tile(std::forward<Range>(rng), std::declval<Difference>())
  )
{
  // NOTE(review): number_of_chunks is assumed positive -- a zero value
  // would divide by zero here; confirm callers guarantee this
  Difference chunk_size = (rng.size() + number_of_chunks - 1) / number_of_chunks;
  return agency::experimental::tile(std::forward<Range>(rng), chunk_size);
}
} // end experimental
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/execution/executor/detail/utility/bulk_sync_execute_with_auto_result.hpp>
#include <agency/execution/executor/executor_traits.hpp>
#include <agency/detail/factory.hpp>
#include <agency/detail/invoke.hpp>
#include <agency/detail/integer_sequence.hpp>
#include <agency/detail/type_traits.hpp>
namespace agency
{
namespace detail
{
namespace bulk_sync_execute_with_auto_result_and_without_shared_parameters_detail
{
// adapts a callable taking only an agent index to the shape of callable
// bulk_sync_execute_with_auto_result() expects: any shared parameters
// passed after the index are accepted and ignored
template<class Function>
struct ignore_shared_parameters_and_invoke
{
  // mutable so the stored callable may be non-const-invocable
  mutable Function f;
  template<class Index, class... IgnoredArgs>
  __AGENCY_ANNOTATION
  result_of_t<Function(const Index&)> operator()(const Index& idx, IgnoredArgs&...) const
  {
    return agency::detail::invoke(f, idx);
  }
};
// a factory whose product is a unit object which the functor above ignores;
// one such factory is expanded per level of the execution hierarchy
template<size_t>
using factory_returning_ignored_result = agency::detail::unit_factory;
// implementation: Indices... has one element per level of the executor's
// execution hierarchy and exists only to expand one dummy shared-parameter
// factory per level
template<size_t... Indices, class E, class Function>
__AGENCY_ANNOTATION
auto bulk_sync_execute_with_auto_result_and_without_shared_parameters_impl(index_sequence<Indices...>,
                                                                           E& exec,
                                                                           Function f,
                                                                           executor_shape_t<E> shape) ->
  decltype(
    bulk_sync_execute_with_auto_result(
      exec,
      ignore_shared_parameters_and_invoke<Function>{f},
      shape,
      factory_returning_ignored_result<Indices>()...
    )
  )
{
  return bulk_sync_execute_with_auto_result(
    exec,                                             // the executor
    ignore_shared_parameters_and_invoke<Function>{f}, // the functor to execute
    shape,                                            // the number of agents to create
    factory_returning_ignored_result<Indices>()...    // pass a factory for each level of execution hierarchy. the results of these factories will be ignored
  );
}
} // end bulk_sync_execute_with_auto_result_and_without_shared_parameters_detail
// executes f(idx) for every index in shape on exec and collects the results,
// without exposing any shared parameters to f
template<class E, class Function,
         __AGENCY_REQUIRES(BulkExecutor<E>())
        >
__AGENCY_ANNOTATION
auto bulk_sync_execute_with_auto_result_and_without_shared_parameters(E& exec,
                                                                      Function f,
                                                                      executor_shape_t<E> shape) ->
  decltype(
    bulk_sync_execute_with_auto_result_and_without_shared_parameters_detail::bulk_sync_execute_with_auto_result_and_without_shared_parameters_impl(
      detail::make_index_sequence<executor_execution_depth<E>::value>(),
      exec,
      f,
      shape
    )
  )
{
  namespace ns = bulk_sync_execute_with_auto_result_and_without_shared_parameters_detail;
  return ns::bulk_sync_execute_with_auto_result_and_without_shared_parameters_impl(
    detail::make_index_sequence<executor_execution_depth<E>::value>(), // one index per hierarchy level
    exec,
    f,
    shape
  );
} // end bulk_sync_execute_with_auto_result_and_without_shared_parameters()
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <utility>
namespace agency
{
// future_value: metafunction mapping a Future type to its value type
template<class Future>
struct future_value
{
  // the decay removes the reference returned
  // from futures like shared_future
  // the idea is given Future<T>,
  // future_value<Future<T>> returns T
  using type = typename std::decay<
    decltype(std::declval<Future>().get())
  >::type;
};
// convenience alias for future_value<Future>::type
template<class Future>
using future_value_t = typename future_value<Future>::type;
} // end agency
<file_sep>#include <agency/agency.hpp>
#include <agency/experimental.hpp>

#include <cassert>
#include <iostream>
#include <limits>
#include <numeric>
#include <stdexcept>
#include <vector>

#include <stdlib.h>
#include <unistd.h>
#include <sys/mman.h>
#include <sys/wait.h>
// This example program demonstrates how to create a user-defined executor which forks a new process every time it creates
// an execution agent.
//
// There are two major components:
//
// 1. The executor itself, which implements its .bulk_sync_execute() function via fork() and
// 2. a special type of allocator for allocating shared memory via mmap() through which the forked processes may communicate.
//
// Finally, we validate that our executor is correct by using it to create execution for a parallel sum algorithm.
// Forked processes may communicate through shared memory which has been dynamically-allocated by mmap.
// An allocator which obtains memory visible to all forked processes by
// calling mmap() with MAP_SHARED | MAP_ANONYMOUS.
template<class T>
class shared_memory_allocator
{
  public:
    using value_type = T;
    shared_memory_allocator() = default;
    template <class U>
    shared_memory_allocator(const shared_memory_allocator<U>&) {}

    // Allocates storage for n objects of type T in shared memory.
    // Throws std::bad_alloc if the requested size would overflow or if mmap fails.
    T* allocate(std::size_t n)
    {
      // guard against n * sizeof(T) overflowing std::size_t
      if(n > std::numeric_limits<std::size_t>::max() / sizeof(T))
      {
        throw std::bad_alloc();
      }

      void* ptr = mmap(NULL, n * sizeof(T),
                       PROT_READ | PROT_WRITE,
                       MAP_SHARED | MAP_ANONYMOUS,
                       -1, 0);

      // mmap reports failure by returning MAP_FAILED, not NULL
      if(ptr == MAP_FAILED)
      {
        throw std::bad_alloc();
      }

      return static_cast<T*>(ptr);
    }

    // Unmaps an allocation previously returned by allocate().
    void deallocate(T* ptr, std::size_t n)
    {
      // unmap the whole allocation: n * sizeof(T) bytes
      // (previously sizeof(*ptr), which unmapped only a single element)
      munmap(ptr, n * sizeof(T));
    }
};
// This executor creates execution by forking a process for each execution agent it creates.
// This executor creates execution by forking a process for each execution agent it creates.
class fork_executor
{
  public:
    // forked processes execute in parallel
    using execution_category = agency::parallel_execution_tag;

    // forked processes communicate through shared memory
    template<typename T> using allocator = shared_memory_allocator<T>;

    // Creates n execution agents, one forked child process per agent, and
    // invokes f(i, result, shared) in agent i. The result and shared objects
    // live in mmap'd shared memory so children and parent see the same objects.
    // Returns the object produced by result_factory after all children exit.
    // Throws std::runtime_error if fork() fails.
    template<class Function, class ResultFactory, class SharedFactory>
    typename std::result_of<ResultFactory()>::type
    bulk_sync_execute(Function f, size_t n, ResultFactory result_factory, SharedFactory shared_factory)
    {
      // name the types of the objects returned by the factories
      using result_type = typename std::result_of<ResultFactory()>::type;
      using shared_parm_type = typename std::result_of<SharedFactory()>::type;

      // when each forked child process invokes f, it needs to pass along the objects
      // returned by the two factories, and there is only a single object of each.
      // use our special allocator to create single-element containers for them in shared memory
      // XXX these should be unique_ptr, but no allocate_unique() function exists
      std::vector<result_type, allocator<result_type>> result(1, result_factory());
      std::vector<shared_parm_type, allocator<shared_parm_type>> shared_parm(1, shared_factory());

      // create n children with fork
      for(size_t i = 0; i < n; ++i)
      {
        pid_t child = fork();

        if(child == 0)
        {
          // each child invokes f with the result and shared parameter
          f(i, result[0], shared_parm[0]);

          // forked children should exit through _exit()
          _exit(0);
        }

        if(child == -1)
        {
          // fork failed: reap any children created so far before reporting the error
          while(wait(nullptr) > 0)
          {
          }
          throw std::runtime_error("fork_executor: fork() failed");
        }
      }

      while(wait(nullptr) > 0)
      {
        // spin wait until all forked children complete
      }

      // return the result object by moving it for efficiency
      return std::move(result[0]);
    }
};
template<class ParallelPolicy>
int parallel_sum(ParallelPolicy&& policy, int* data, int n)
{
// create a view of the input
agency::experimental::span<int> input(data, n);
// divide the input into 4 tiles
int num_agents = 4;
auto tiles = agency::experimental::tile_evenly(input, num_agents);
// create agents to sum each tile in parallel
auto partial_sums = agency::bulk_invoke(policy(num_agents), [=](agency::parallel_agent& self)
{
// get this parallel agent's tile
auto this_tile = tiles[self.index()];
// return the sum of this tile
return std::accumulate(this_tile.begin(), this_tile.end(), 0);
});
// return the sum of partial sums
return std::accumulate(partial_sums.begin(), partial_sums.end(), 0);
}
int main()
{
  // 32M elements, all ones, so the expected sum equals the element count
  std::vector<int> vec(32 << 20, 1);
  // compute a reference
  int reference = parallel_sum(agency::par, vec.data(), vec.size());
  // now execute parallel_sum on our executor
  fork_executor fork_exec;
  int fork_sum = parallel_sum(agency::par.on(fork_exec), vec.data(), vec.size());
  // validate that the results match
  assert(reference == fork_sum);
  std::cout << "OK" << std::endl;
}
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/memory/allocator/detail/allocator_traits/is_allocator.hpp>
#include <memory>
#include <type_traits>
#include <typeinfo>
#include <utility>
#include <cassert>
namespace agency
{
namespace detail
{
// XXX this is very similar to std::memory_resource
// consider just implementing that and using it instead
// XXX abstract_allocator is used as an abstract base class for type erasing allocators
// runtime interface through which a type-erased allocator is manipulated;
// concrete_allocator<Allocator> overrides every member below
struct abstract_allocator
{
  __AGENCY_ANNOTATION
  virtual ~abstract_allocator() {}
  // XXX these member functions below should be pure virtual, but
  // nvcc has trouble with that
  // as a workaround, define them
  // (the placeholder bodies here are never meant to be called)
  __AGENCY_ANNOTATION
  //virtual void copy_construct_into(abstract_allocator& to) const = 0;
  virtual void copy_construct_into(abstract_allocator&) const {}
  __AGENCY_ANNOTATION
  //virtual void copy_assign_to(abstract_allocator& to) const = 0;
  virtual void copy_assign_to(abstract_allocator&) const {}
  // virtual const std::type_info& type() const = 0;
  virtual const std::type_info& type() const { return typeid(void); }
  __AGENCY_ANNOTATION
  //virtual void* allocate(std::size_t n) = 0;
  virtual void* allocate(std::size_t) { return nullptr; }
  __AGENCY_ANNOTATION
  //virtual void deallocate(void* ptr, std::size_t n) = 0;
  virtual void deallocate(void*, std::size_t) {}
  __AGENCY_ANNOTATION
  //virtual bool equal_to(const abstract_allocator& other) const = 0;
  virtual bool equal_to(const abstract_allocator&) const { return false; }
  __AGENCY_ANNOTATION
  //virtual bool not_equal_to(const abstract_allocator& other) const = 0;
  virtual bool not_equal_to(const abstract_allocator&) const { return true; }
};
// wraps a concrete Allocator behind the abstract_allocator interface;
// the stored allocator is rebound to allocate raw bytes (char)
template<class Allocator>
struct concrete_allocator : abstract_allocator
{
  __agency_exec_check_disable__
  __AGENCY_ANNOTATION
  concrete_allocator(const concrete_allocator& other) = default;
  __agency_exec_check_disable__
  __AGENCY_ANNOTATION
  ~concrete_allocator() = default;
  __agency_exec_check_disable__
  __AGENCY_ANNOTATION
  concrete_allocator(const Allocator& allocator) : concrete_allocator_(allocator) {}
  __agency_exec_check_disable__
  __AGENCY_ANNOTATION
  void copy_construct_into(abstract_allocator& to) const
  {
    // NOTE(review): assumes `to` actually refers to storage for this same
    // concrete_allocator type -- callers must guarantee the downcast is valid
    concrete_allocator& other = static_cast<concrete_allocator&>(to);
    // copy construct into other
    new (&other) concrete_allocator(*this);
  }
  __AGENCY_ANNOTATION
  void copy_assign_to(abstract_allocator& to) const
  {
    concrete_allocator& other = static_cast<concrete_allocator&>(to);
    // copy assign a concrete allocator to other
    other = *this;
  }
  // identifies the wrapped allocator type for runtime type checks
  const std::type_info& type() const
  {
    return typeid(Allocator);
  }
  __agency_exec_check_disable__
  __AGENCY_ANNOTATION
  void* allocate(std::size_t n)
  {
    // call the concrete allocator object; n counts raw bytes
    return concrete_allocator_.allocate(n);
  }
  __agency_exec_check_disable__
  __AGENCY_ANNOTATION
  void deallocate(void* ptr, std::size_t n)
  {
    // call the concrete allocator object
    return concrete_allocator_.deallocate(reinterpret_cast<char*>(ptr), n);
  }
  __agency_exec_check_disable__
  __AGENCY_ANNOTATION
  bool equal_to(const abstract_allocator& other) const
  {
    const concrete_allocator& concrete_other = static_cast<const concrete_allocator&>(other);
    // compare == with other
    return concrete_allocator_ == concrete_other.concrete_allocator_;
  }
  __agency_exec_check_disable__
  __AGENCY_ANNOTATION
  bool not_equal_to(const abstract_allocator& other) const
  {
    const concrete_allocator& concrete_other = static_cast<const concrete_allocator&>(other);
    // compare != with other
    return concrete_allocator_ != concrete_other.concrete_allocator_;
  }
  // the type of the allocator we store is Allocator rebound to allocate raw bytes
  using concrete_allocator_type = typename std::allocator_traits<Allocator>::template rebind_alloc<char>;
  // mutable because allocate()/deallocate() are invoked through const paths
  mutable concrete_allocator_type concrete_allocator_;
};
// any_small_allocator: type-erased storage for any allocator whose size fits
// within max_size bytes; the contained allocator lives in place (no heap
// allocation) and is manipulated through abstract_allocator's virtual interface.
template<class T>
class any_small_allocator
{
  public:
    // the largest allocator (in bytes) this container can hold
    static const size_t max_size = 4 * sizeof(void*);
    using value_type = T;

    // default construction erases a std::allocator<T>
    any_small_allocator()
      : any_small_allocator(std::allocator<T>())
    {}

    // copy constructor
    // NOTE: this must be user-provided. The previously implicitly-defaulted
    // copy constructor performed a bytewise copy of storage_ without invoking
    // the contained allocator's copy constructor, which is undefined behavior
    // for non-trivially-copyable allocators.
    __AGENCY_ANNOTATION
    any_small_allocator(const any_small_allocator& other)
    {
      // call the contained allocator's copy constructor
      other.get_abstract_allocator().copy_construct_into(get_abstract_allocator());
    }

    // converting constructor from an any_small_allocator of another value_type;
    // both store byte-rebound allocators, so the erased state is compatible
    template<class U>
    __AGENCY_ANNOTATION
    any_small_allocator(const any_small_allocator<U>& other)
    {
      // call the allocator's copy constructor
      other.get_abstract_allocator().copy_construct_into(get_abstract_allocator());
    }

    // erasing constructor: stores a copy of any sufficiently-small Allocator
    __agency_exec_check_disable__
    template<class Allocator,
             __AGENCY_REQUIRES(
               !std::is_same<Allocator,any_small_allocator>::value and
               (sizeof(Allocator) <= max_size) and
               detail::is_allocator<Allocator>::value
             )>
    __AGENCY_ANNOTATION
    any_small_allocator(const Allocator& allocator)
    {
      // rebind Allocator to get an allocator for T
      using rebound_allocator_type = typename std::allocator_traits<Allocator>::template rebind_alloc<T>;
      rebound_allocator_type rebound_allocator = allocator;
      new (&storage_) concrete_allocator<rebound_allocator_type>(rebound_allocator);
    }

    __AGENCY_ANNOTATION
    ~any_small_allocator()
    {
      // call the allocator's destructor
      get_abstract_allocator().~abstract_allocator();
    }

    any_small_allocator& operator=(const any_small_allocator& other)
    {
      if(type() == other.type())
      {
        // the types match, just call the copy assign function
        other.get_abstract_allocator().copy_assign_to(get_abstract_allocator());
      }
      else
      {
        // the types do not match, need to destroy and then copy construct
        // destroy our value
        get_abstract_allocator().~abstract_allocator();
        // copy construct from the other value
        other.get_abstract_allocator().copy_construct_into(get_abstract_allocator());
      }
      return *this;
    }

    // returns the type_info of the contained allocator
    const std::type_info& type() const
    {
      return get_abstract_allocator().type();
    }

    __AGENCY_ANNOTATION
    T* allocate(std::size_t n)
    {
      // allocate raw bytes using the abstract allocator and reinterpret these bytes into T
      return reinterpret_cast<T*>(get_abstract_allocator().allocate(n * sizeof(T)));
    }

    __AGENCY_ANNOTATION
    void deallocate(T* ptr, std::size_t n)
    {
      // reinterpret ptr into a pointer to raw bytes and deallocate using the abstract allocator
      get_abstract_allocator().deallocate(ptr, n * sizeof(T));
    }

    bool operator==(const any_small_allocator& other) const
    {
      if(type() == other.type())
      {
        // the types match, call equal_to()
        return get_abstract_allocator().equal_to(other.get_abstract_allocator());
      }
      // allocators of different contained types never compare equal
      return false;
    }

    bool operator!=(const any_small_allocator& other) const
    {
      if(type() == other.type())
      {
        // the types match, call not_equal_to()
        return get_abstract_allocator().not_equal_to(other.get_abstract_allocator());
      }
      return true;
    }

    // returns a reference to the contained allocator, which must be of type Allocator
    template<class Allocator>
    Allocator& get()
    {
      if(type() != typeid(Allocator))
      {
        assert(0);
      }
      concrete_allocator<Allocator>& storage = static_cast<concrete_allocator<Allocator>&>(get_abstract_allocator());
      return storage.concrete_allocator_;
    }

  private:
    // any_small_allocator's constructor needs access to the .get_abstract_allocator() function of all other types of any_small_allocator
    template<class U> friend class any_small_allocator;

    __AGENCY_ANNOTATION
    abstract_allocator& get_abstract_allocator()
    {
      return *reinterpret_cast<abstract_allocator*>(&storage_);
    }

    __AGENCY_ANNOTATION
    const abstract_allocator& get_abstract_allocator() const
    {
      return *reinterpret_cast<const abstract_allocator*>(&storage_);
    }

    // untyped storage for the contained allocator
    typename std::aligned_storage<max_size>::type storage_;
};
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/bulk_invoke.hpp>
#include <agency/execution/execution_policy.hpp>
#include <agency/detail/type_traits.hpp>
#include <agency/detail/iterator/iterator_traits.hpp>
#include <agency/tuple.hpp>
namespace agency
{
namespace detail
{
// copies the element at this agent's rank from the input range to the output range
struct copy_n_functor
{
  __agency_exec_check_disable__
  template<class Agent, class RandomAccessIterator1, class RandomAccessIterator2>
  __AGENCY_ANNOTATION
  void operator()(Agent& self, RandomAccessIterator1 first, RandomAccessIterator2 result)
  {
    const auto rank = self.rank();
    result[rank] = first[rank];
  }
};
// non-sequenced case with random-access iterators: one agent copies each element
template<class ExecutionPolicy, class RandomAccessIterator1, class Size, class RandomAccessIterator2,
         __AGENCY_REQUIRES(
           !policy_is_sequenced<decay_t<ExecutionPolicy>>::value and
           iterators_are_random_access<RandomAccessIterator1,RandomAccessIterator2>::value
         )>
__AGENCY_ANNOTATION
tuple<RandomAccessIterator1,RandomAccessIterator2> default_copy_n(ExecutionPolicy&& policy, RandomAccessIterator1 first, Size n, RandomAccessIterator2 result)
{
  agency::bulk_invoke(policy(n), copy_n_functor(), first, result);
  // return the pair of end iterators
  return agency::make_tuple(first + n, result + n);
}
// sequenced (or non-random-access) case: copy the elements one at a time
template<class ExecutionPolicy, class InputIterator, class Size, class OutputIterator,
         __AGENCY_REQUIRES(
           policy_is_sequenced<decay_t<ExecutionPolicy>>::value or
           !iterators_are_random_access<InputIterator,OutputIterator>::value
         )>
__AGENCY_ANNOTATION
tuple<InputIterator,OutputIterator> default_copy_n(ExecutionPolicy&&, InputIterator first, Size n, OutputIterator result)
{
  // XXX we might wish to bulk_invoke a single agent and execute this loop inside
  while(n > 0)
  {
    *result = *first;
    ++first;
    ++result;
    --n;
  }
  return agency::make_tuple(first, result);
}
namespace copy_n_detail
{
// detection machinery: test(int) is viable only when the expression
// copy_n(args...) is well-formed under unqualified (ADL) lookup
template<class ExecutionPolicy, class InputIterator, class Size, class OutputIterator>
struct has_copy_n_free_function_impl
{
  template<class... Args,
           class = decltype(
             copy_n(std::declval<Args>()...)
           )>
  static std::true_type test(int);
  // fallback chosen when the expression above is ill-formed
  template<class...>
  static std::false_type test(...);
  using type = decltype(test<ExecutionPolicy,InputIterator,Size,OutputIterator>(0));
};
// this type trait reports whether copy_n(policy, first, n, result) is well-formed
// when copy_n is called as a free function (i.e., via ADL)
template<class ExecutionPolicy, class InputIterator, class Size, class OutputIterator>
using has_copy_n_free_function = typename has_copy_n_free_function_impl<ExecutionPolicy,InputIterator,Size,OutputIterator>::type;
// this is the type of the copy_n customization point
class copy_n_t
{
  private:
    // preferred implementation: a free copy_n() found via ADL exists
    template<class ExecutionPolicy, class InputIterator, class Size, class OutputIterator,
             __AGENCY_REQUIRES(has_copy_n_free_function<ExecutionPolicy,InputIterator,Size,OutputIterator>::value)>
    __AGENCY_ANNOTATION
    static tuple<InputIterator,OutputIterator> impl(ExecutionPolicy&& policy, InputIterator first, Size n, OutputIterator result)
    {
      // call copy_n() via ADL
      return copy_n(std::forward<ExecutionPolicy>(policy), first, n, result);
    }
    // fallback implementation when no ADL overload exists
    template<class ExecutionPolicy, class InputIterator, class Size, class OutputIterator,
             __AGENCY_REQUIRES(!has_copy_n_free_function<ExecutionPolicy,InputIterator,Size,OutputIterator>::value)>
    __AGENCY_ANNOTATION
    static tuple<InputIterator,OutputIterator> impl(ExecutionPolicy&& policy, InputIterator first, Size n, OutputIterator result)
    {
      // call default_copy_n()
      return agency::detail::default_copy_n(std::forward<ExecutionPolicy>(policy), first, n, result);
    }
  public:
    // policy-taking overload: dispatches to the ADL overload or the default
    template<class ExecutionPolicy, class InputIterator, class Size, class OutputIterator>
    __AGENCY_ANNOTATION
    tuple<InputIterator,OutputIterator> operator()(ExecutionPolicy&& policy, InputIterator first, Size n, OutputIterator result) const
    {
      return impl(std::forward<ExecutionPolicy>(policy), first, n, result);
    }
    // policy-free overload: defaults to sequenced execution
    template<class InputIterator, class Size, class OutputIterator>
    __AGENCY_ANNOTATION
    tuple<InputIterator,OutputIterator> operator()(InputIterator first, Size n, OutputIterator result) const
    {
      return operator()(agency::sequenced_execution_policy(), first, n, result);
    }
};
} // end copy_n_detail
namespace
{
// copy_n customization point
constexpr copy_n_detail::copy_n_t copy_n{};
} // end namespace
} // end detail
} // end agency
<file_sep>#include <iostream>
#include <type_traits>
#include <vector>
#include <cassert>
#include <numeric>
#include <agency/execution/executor/experimental/unrolling_executor.hpp>
#include <agency/execution/executor/executor_traits.hpp>
int main()
{
  using namespace agency;

  static const size_t unroll_factor = 10;
  using executor_type = experimental::unrolling_executor<unroll_factor>;

  // compile-time checks of the executor's advertised traits
  static_assert(is_bulk_synchronous_executor<executor_type>::value,
    "unrolling_executor should be a bulk synchronous executor");
  static_assert(is_bulk_executor<executor_type>::value,
    "unrolling_executor should be a bulk executor");
  static_assert(detail::is_detected_exact<sequenced_execution_tag, executor_execution_category_t, executor_type>::value,
    "unrolling_executor should have sequenced_execution_tag execution_category");
  static_assert(detail::is_detected_exact<size_t, executor_shape_t, executor_type>::value,
    "unrolling_executor should have size_t shape_type");
  static_assert(detail::is_detected_exact<size_t, executor_index_t, executor_type>::value,
    "unrolling_executor should have size_t index_type");
  // NOTE: fixed the typo "furture" in this diagnostic message
  static_assert(detail::is_detected_exact<std::future<int>, executor_future_t, executor_type, int>::value,
    "unrolling_executor should have std::future future");
  static_assert(executor_execution_depth<executor_type>::value == 1,
    "unrolling_executor should have execution_depth == 1");

  executor_type exec;

  // exercise shapes around the unroll boundary: empty, below, at, above,
  // multiples of the factor, and large sizes
  std::vector<size_t> shapes = {0, 1, 3, unroll_factor - 1, unroll_factor, unroll_factor + 1, 2 * unroll_factor - 1, 2 * unroll_factor, 100 * unroll_factor, 10000};
  for(auto shape : shapes)
  {
    auto result = exec.bulk_sync_execute(
      [](size_t idx, std::vector<int>& results, std::vector<int>& shared_arg)
      {
        results[idx] = idx + shared_arg[idx];
      },
      shape,
      [=]{ return std::vector<int>(shape); },    // results
      [=]{ return std::vector<int>(shape, 13); } // shared_arg
    );

    // each slot should hold its index plus the shared value 13
    std::vector<int> reference(shape);
    std::iota(reference.begin(), reference.end(), 13);
    assert(reference == result);
  }

  std::cout << "OK" << std::endl;
  return 0;
}
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/type_traits.hpp>
#include <agency/execution/execution_categories.hpp>
#include <agency/execution/executor/executor_traits/detail/member_execution_category_or.hpp>
namespace agency
{
namespace detail
{
// reports the execution depth of T's execution category, treating types
// which do not advertise an execution_category as unsequenced
template<class T>
struct executor_execution_depth_or
  : agency::detail::execution_depth<
      member_execution_category_or_t<T,unsequenced_execution_tag>
    >
{};
} // end detail
} // end agency
<file_sep>#include <agency/agency.hpp>
#include <agency/execution/executor/detail/utility.hpp>
#include <cassert>
#include <iostream>
#include "../test_executors.hpp"
template<class Executor>
void test(Executor exec)
{
  // counter is incremented concurrently by all agents, hence atomic
  std::atomic<int> counter{0};
  using shape_type = agency::executor_shape_t<Executor>;
  using index_type = agency::executor_index_t<Executor>;
  shape_type shape{10};
  auto f = agency::detail::bulk_async_execute_with_void_result(exec,
    [&](index_type, int& shared_arg)
    {
      counter += shared_arg;
    },
    shape,
    []{ return 13; } // shared_arg
  );
  f.wait();
  // each of the 10 agents adds the shared value 13 exactly once
  assert(counter == 13 * 10);
}
int main()
{
  // exercise bulk_async_execute_with_void_result() across every flavor of
  // test executor: bulk/non-bulk, synchronous, asynchronous, and continuation
  test(bulk_synchronous_executor());
  test(bulk_asynchronous_executor());
  test(bulk_continuation_executor());
  test(not_a_bulk_synchronous_executor());
  test(not_a_bulk_asynchronous_executor());
  test(not_a_bulk_continuation_executor());
  test(complete_bulk_executor());
  std::cout << "OK" << std::endl;
  return 0;
}
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <memory>
namespace agency
{
namespace detail
{
// forwarding_iterator adapts an underlying iterator so that dereferencing it
// yields Reference (which may differ from the adapted iterator's natural
// reference type, e.g. an rvalue reference for forwarding semantics).
// It reports the same iterator_category as the adapted Iterator.
template<class Iterator, class Reference = typename std::iterator_traits<Iterator>::reference>
class forwarding_iterator
{
  public:
    using value_type = typename std::iterator_traits<Iterator>::value_type;
    using reference = Reference;
    using pointer = typename std::iterator_traits<Iterator>::pointer;
    using difference_type = typename std::iterator_traits<Iterator>::difference_type;
    using iterator_category = typename std::iterator_traits<Iterator>::iterator_category;

    __AGENCY_ANNOTATION
    forwarding_iterator() = default;

    __AGENCY_ANNOTATION
    explicit forwarding_iterator(Iterator x)
      : current_(x)
    {}

    // converting constructor from a forwarding_iterator with a different
    // underlying iterator or reference type
    // note: we use other.base() rather than other.current_ because current_
    // is private in the distinct class forwarding_iterator<U,UReference>,
    // so accessing it directly is ill-formed whenever U differs from Iterator
    template<class U, class UReference>
    __AGENCY_ANNOTATION
    forwarding_iterator(const forwarding_iterator<U,UReference>& other)
      : current_(other.base())
    {}

    // dereference: cast the underlying reference to the forwarded Reference type
    __AGENCY_ANNOTATION
    reference operator*() const
    {
      return static_cast<reference>(*current_);
    }

    // subscript
    __AGENCY_ANNOTATION
    reference operator[](difference_type n) const
    {
      forwarding_iterator tmp = *this + n;
      return *tmp;
    }

    // not equal
    __AGENCY_ANNOTATION
    bool operator!=(const forwarding_iterator& rhs) const
    {
      return current_ != rhs.current_;
    }

    // pre-increment
    __AGENCY_ANNOTATION
    forwarding_iterator& operator++()
    {
      ++current_;
      return *this;
    }

    // post-increment
    __AGENCY_ANNOTATION
    forwarding_iterator operator++(int)
    {
      forwarding_iterator result = *this;
      ++current_;
      return result;
    }

    // pre-decrement
    __AGENCY_ANNOTATION
    forwarding_iterator& operator--()
    {
      --current_;
      return *this;
    }

    // post-decrement
    __AGENCY_ANNOTATION
    forwarding_iterator operator--(int)
    {
      forwarding_iterator result = *this;
      --current_;
      return result;
    }

    // plus-equal
    __AGENCY_ANNOTATION
    forwarding_iterator& operator+=(difference_type n)
    {
      current_ += n;
      return *this;
    }

    // plus
    __AGENCY_ANNOTATION
    forwarding_iterator operator+(difference_type n) const
    {
      forwarding_iterator result = *this;
      result += n;
      return result;
    }

    // difference
    // const-qualified so that two const iterators may be subtracted
    __AGENCY_ANNOTATION
    difference_type operator-(const forwarding_iterator& rhs) const
    {
      return current_ - rhs.current_;
    }

    // returns the adapted iterator
    __AGENCY_ANNOTATION
    Iterator base() const
    {
      return current_;
    }

  private:
    Iterator current_;
};
// Convenience factory: deduces Iterator while letting the caller choose the
// forwarded Reference type explicitly.
template<class Reference, class Iterator>
__AGENCY_ANNOTATION
forwarding_iterator<Iterator,Reference> make_forwarding_iterator(Iterator iter)
{
  using result_type = forwarding_iterator<Iterator,Reference>;
  return result_type{iter};
}
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <memory>
namespace agency
{
namespace detail
{
// constant_iterator is a random access iterator which yields the same value
// every time it is dereferenced, regardless of its position. Two
// constant_iterators compare and subtract by position only.
template<class T>
class constant_iterator
{
  public:
    using value_type = T;
    using reference = const value_type&;
    using pointer = const value_type*;
    using difference_type = std::ptrdiff_t;
    using iterator_category = std::random_access_iterator_tag;

    __AGENCY_ANNOTATION
    constant_iterator() = default;

    __AGENCY_ANNOTATION
    constant_iterator(const constant_iterator&) = default;

    // creates a constant_iterator yielding value, located at the given position
    __AGENCY_ANNOTATION
    constant_iterator(const T& value, size_t position)
      : value_(value), position_(position)
    {}

    // creates a constant_iterator yielding value, located at position 0
    __AGENCY_ANNOTATION
    constant_iterator(const T& value)
      : constant_iterator(value, 0)
    {}

    // dereference
    __AGENCY_ANNOTATION
    reference operator*() const
    {
      return value_;
    }

    // subscript
    __AGENCY_ANNOTATION
    reference operator[](difference_type) const
    {
      // note that there is no need to create a temporary iterator
      // e.g. tmp = *this + n
      // because the value returned by *tmp == this->value_
      return value_;
    }

    // not equal
    // note: only positions are compared, not values
    __AGENCY_ANNOTATION
    bool operator!=(const constant_iterator& rhs) const
    {
      return position_ != rhs.position_;
    }

    // pre-increment
    __AGENCY_ANNOTATION
    constant_iterator& operator++()
    {
      ++position_;
      return *this;
    }

    // post-increment
    __AGENCY_ANNOTATION
    constant_iterator operator++(int)
    {
      constant_iterator result = *this;
      ++position_;
      return result;
    }

    // pre-decrement
    __AGENCY_ANNOTATION
    constant_iterator& operator--()
    {
      --position_;
      return *this;
    }

    // post-decrement
    __AGENCY_ANNOTATION
    constant_iterator operator--(int)
    {
      constant_iterator result = *this;
      --position_;
      return result;
    }

    // plus-equal
    __AGENCY_ANNOTATION
    constant_iterator& operator+=(difference_type n)
    {
      position_ += n;
      return *this;
    }

    // plus
    __AGENCY_ANNOTATION
    constant_iterator operator+(difference_type n) const
    {
      constant_iterator result = *this;
      result += n;
      return result;
    }

    // difference
    // const-qualified so that two const iterators may be subtracted
    __AGENCY_ANNOTATION
    difference_type operator-(const constant_iterator& rhs) const
    {
      return position_ - rhs.position_;
    }

  private:
    T value_;
    size_t position_;
};
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/detail/invoke.hpp>
#include <agency/detail/type_traits.hpp>
#include <agency/execution/executor/detail/utility/invoke_functors.hpp>
#include <agency/execution/executor/customization_points/bulk_async_execute.hpp>
#include <type_traits>
namespace agency
{
namespace detail
{
// Asynchronously executes f across shape agents on exec, collecting each
// invocation's (non-void) result into the container produced by
// result_factory, and returns a future to that container.
// Enabled only when E is a bulk executor, the number of shared factories
// matches E's execution depth, and f's result type is non-void.
template<class E, class Function, class ResultFactory, class... SharedFactories,
         __AGENCY_REQUIRES(BulkExecutor<E>()),
         __AGENCY_REQUIRES(executor_execution_depth<E>::value == sizeof...(SharedFactories)),
         __AGENCY_REQUIRES(!std::is_void<result_of_t<Function(executor_index_t<E>, result_of_t<SharedFactories()>&...)>>::value)
        >
__AGENCY_ANNOTATION
executor_future_t<E,result_of_t<ResultFactory()>>
bulk_async_execute_with_collected_result(E& exec, Function f, executor_shape_t<E> shape, ResultFactory result_factory, SharedFactories... shared_factories)
{
  // wrap f in a functor that will collect f's result and call bulk_async_execute()
  return agency::bulk_async_execute(exec, invoke_and_collect_result<Function>{f}, shape, result_factory, shared_factories...);
}
} // end detail
} // end agency
<file_sep>#include <iostream>
#include <agency/execution/executor/executor_traits.hpp>
#include "../test_executors.hpp"
// test fixture: a minimal bulk executor that does not advertise an
// execution_category member; its detected category should default to
// unsequenced (declaration only -- never called)
struct bulk_executor_without_category
{
  template<class Function, class ResultFactory, class SharedFactory>
  typename std::result_of<ResultFactory()>::type
  bulk_sync_execute(Function f, size_t n, ResultFactory result_factory, SharedFactory shared_factory);
};

// test fixture: identical to the above, but explicitly advertises
// sequenced_execution_tag as its execution_category
struct bulk_executor_with_category
{
  using execution_category = agency::sequenced_execution_tag;

  template<class Function, class ResultFactory, class SharedFactory>
  typename std::result_of<ResultFactory()>::type
  bulk_sync_execute(Function f, size_t n, ResultFactory result_factory, SharedFactory shared_factory);
};
int main()
{
  using namespace agency;

  // a type with no executor interface must not yield an execution category
  static_assert(!agency::detail::is_detected<executor_execution_category_t, not_an_executor>::value,
    "executor_execution_category_t<not_an_executor> should not be detected");

  // a bulk executor without a nested execution_category falls back to unsequenced
  static_assert(agency::detail::is_detected_exact<unsequenced_execution_tag, executor_execution_category_t, bulk_executor_without_category>::value,
    "bulk_executor_without_category should have unsequenced_execution_tag execution_category");

  // a bulk executor advertising a category reports exactly that category
  static_assert(agency::detail::is_detected_exact<sequenced_execution_tag, executor_execution_category_t, bulk_executor_with_category>::value,
    "bulk_executor_with_category should have sequenced_execution_tag execution_category");

  std::cout << "OK" << std::endl;

  return 0;
}
<file_sep>#include <iostream>
#include <type_traits>
#include <vector>
#include <cassert>
#include <agency/future.hpp>
#include <agency/execution/executor/customization_points.hpp>
#include "../test_executors.hpp"
// async_execute() should run the function asynchronously and communicate its
// result through the returned future.
template<class Executor>
void test(Executor exec)
{
  auto seven = agency::async_execute(exec, []{ return 7; }).get();
  assert(seven == 7);
}
int main()
{
  // async_execute() should work with native single-agent executors as well
  // as bulk executors that require adaptation
  test(continuation_executor());
  test(asynchronous_executor());
  test(bulk_continuation_executor());
  test(bulk_asynchronous_executor());

  // XXX call test() with all the other types of executors

  std::cout << "OK" << std::endl;

  return 0;
}
<file_sep>#pragma once
#include <agency/future.hpp>
#include <agency/execution/execution_categories.hpp>
#include <agency/detail/type_traits.hpp>
#include <utility>
namespace agency
{
namespace experimental
{
namespace detail
{
// static_for_loop_impl recursively instantiates f(first), f(first+1), ...,
// f(last-1), producing a fully unrolled sequence of calls at compile time.
template<size_t first, size_t last>
struct static_for_loop_impl
{
  template<class Function>
  __AGENCY_ANNOTATION
  static void invoke(Function&& f)
  {
    std::forward<Function>(f)(first);

    // recurse to emit the call for the next index
    static_for_loop_impl<first+1,last>::invoke(std::forward<Function>(f));
  }
};

// base case: empty index range, nothing to invoke
template<size_t first>
struct static_for_loop_impl<first,first>
{
  template<class Function>
  __AGENCY_ANNOTATION
  static void invoke(Function&&)
  {
  }
};

// invokes f(0), f(1), ..., f(n-1) with the loop fully unrolled at compile time
template<size_t n, class Function>
__AGENCY_ANNOTATION
void static_for_loop(Function&& f)
{
  static_for_loop_impl<0,n>::invoke(std::forward<Function>(f));
}
} // end detail
// XXX the type of unroll_factor should be auto
// maybe it should be 32b for now?
//
// unrolling_executor is a sequenced executor which executes its agents in a
// loop manually unrolled by a compile-time factor via static_for_loop.
template<std::size_t unroll_factor_>
class unrolling_executor
{
  public:
    using execution_category = sequenced_execution_tag;

    static constexpr std::size_t unroll_factor = unroll_factor_;

    // the natural grain size of this executor is one fully unrolled iteration
    __AGENCY_ANNOTATION
    static constexpr std::size_t unit_shape()
    {
      return unroll_factor;
    }

    // Executes f(i, result, shared_parm) for i in [0, n), in order, and
    // returns the result object produced by result_factory.
    template<class Function, class ResultFactory, class SharedFactory>
    __AGENCY_ANNOTATION
    agency::detail::result_of_t<ResultFactory()>
    bulk_sync_execute(Function f, size_t n, ResultFactory result_factory, SharedFactory shared_factory)
    {
      auto result = result_factory();
      auto shared_parm = shared_factory();

      // the following implementation is equivalent to this loop
      // however, #pragma unroll is not portable and
      // is not guaranteed to unroll anyway
      //#pragma unroll(unroll_factor)
      //for(size_t i = 0; i < n; ++i)
      //{
      //  f(i, result, shared_parm);
      //}

      // technically, these first two branches are not required for correctness
      // they're included because in these cases we can use the static_for_loop's
      // loop variable i directly without having to introduce an additional variable
      if(n == unroll_factor)
      {
        // exactly one full tile: no bounds guard needed
        detail::static_for_loop<unroll_factor>([&](std::size_t i)
        {
          f(i, result, shared_parm);
        });
      }
      else if(n < unroll_factor)
      {
        // partial tile: guard each unrolled invocation against overrun
        detail::static_for_loop<unroll_factor>([&](std::size_t i)
        {
          if(i < n)
          {
            f(i, result, shared_parm);
          }
        });
      }
      else
      {
        std::size_t i = 0;

        // while the unroll_factor is no larger than the remaining work,
        // we don't need to guard the invocation of f()
        while(unroll_factor <= n - i)
        {
          detail::static_for_loop<unroll_factor>([&](std::size_t)
          {
            f(i, result, shared_parm);
            ++i;
          });
        }

        // the final loop is larger than the remaining work,
        // so we need to guard the invocation of f()
        if(n - i)
        {
          detail::static_for_loop<unroll_factor>([&](std::size_t)
          {
            if(i < n)
            {
              f(i, result, shared_parm);
              ++i;
            }
          });
        }
      }

      // return by name (not std::move) so the compiler may apply NRVO;
      // a named local returned by value is treated as an rvalue here anyway,
      // so std::move was a pessimization
      return result;
    }
};
} // end experimental
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/execution/execution_agent/detail/basic_execution_agent.hpp>
#include <agency/execution/execution_categories.hpp>
#include <agency/coordinate/point.hpp>
namespace agency
{
// execution agents with parallel execution semantics
// parallel_agent is one-dimensional by default; parallel_agent_2d is indexed by size2
using parallel_agent = detail::basic_execution_agent<parallel_execution_tag>;
using parallel_agent_1d = parallel_agent;
using parallel_agent_2d = detail::basic_execution_agent<parallel_execution_tag, size2>;
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/execution/executor/detail/utility/bulk_async_execute_with_one_shared_parameter.hpp>
#include <agency/execution/executor/detail/utility/bulk_async_execute_without_shared_parameters.hpp>
#include <agency/execution/executor/detail/utility/bulk_async_execute_with_void_result.hpp>
#include <agency/execution/executor/detail/utility/bulk_async_execute_with_collected_result.hpp>
#include <agency/execution/executor/detail/utility/bulk_continuation_executor_adaptor.hpp>
#include <agency/execution/executor/detail/utility/bulk_sync_execute_with_auto_result.hpp>
#include <agency/execution/executor/detail/utility/bulk_sync_execute_with_auto_result_and_without_shared_parameters.hpp>
#include <agency/execution/executor/detail/utility/bulk_sync_execute_with_collected_result.hpp>
#include <agency/execution/executor/detail/utility/bulk_sync_execute_with_void_result.hpp>
#include <agency/execution/executor/detail/utility/bulk_share_future.hpp>
#include <agency/execution/executor/detail/utility/bulk_synchronous_executor_adaptor.hpp>
#include <agency/execution/executor/detail/utility/bulk_then_execute_with_auto_result.hpp>
#include <agency/execution/executor/detail/utility/bulk_then_execute_with_collected_result.hpp>
#include <agency/execution/executor/detail/utility/bulk_then_execute_with_void_result.hpp>
#include <agency/execution/executor/detail/utility/bulk_then_execute_without_shared_parameters.hpp>
#include <agency/execution/executor/detail/utility/invoke_functors.hpp>
<file_sep>/// \file
/// \brief Include this file to use any component of Agency related to execution.
///
/// Including `<agency/execution.hpp>` recursively includes Agency header files organized beneath
/// `<agency/execution/*>`.
///
///
/// \defgroup execution Execution
///
#pragma once
#include <agency/detail/config.hpp>
#include <agency/execution/execution_categories.hpp>
#include <agency/execution/executor.hpp>
#include <agency/execution/execution_agent.hpp>
#include <agency/execution/execution_policy.hpp>
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/experimental/ranges/range_traits.hpp>
#include <type_traits>
#include <iterator>
namespace agency
{
namespace experimental
{
// iterator_range is a non-owning view of a range demarcated by an
// (iterator, sentinel) pair.
template<class Iterator, class Sentinel = Iterator>
class iterator_range
{
  public:
    using iterator = Iterator;
    using sentinel = Sentinel;

    // constructs a view of the elements in [begin, end)
    __agency_exec_check_disable__
    __AGENCY_ANNOTATION
    iterator_range(iterator begin, sentinel end)
      : begin_(begin),
        end_(end)
    {}

    // constructs a view of all of rng's elements
    __agency_exec_check_disable__
    template<class Range>
    __AGENCY_ANNOTATION
    iterator_range(Range&& rng)
      : iterator_range(std::forward<Range>(rng).begin(), std::forward<Range>(rng).end())
    {}

    __AGENCY_ANNOTATION
    iterator begin() const
    {
      return begin_;
    }

    __AGENCY_ANNOTATION
    sentinel end() const
    {
      return end_;
    }

    // "drops" the first n elements of the range by advancing the begin iterator n times
    __AGENCY_ANNOTATION
    void drop(typename std::iterator_traits<iterator>::difference_type n)
    {
      begin_ += n;
    }

    // element access
    // const-qualified: indexing a view does not mutate the view itself,
    // and it only uses the const begin() accessor
    __AGENCY_ANNOTATION
    typename std::iterator_traits<iterator>::reference operator[](typename std::iterator_traits<iterator>::difference_type i) const
    {
      return begin()[i];
    }

    // the number of elements in the view
    __AGENCY_ANNOTATION
    typename std::iterator_traits<iterator>::difference_type size() const
    {
      return end() - begin();
    }

  private:
    iterator begin_;
    sentinel end_;
};
// iterator_ranges are already non-owning views, so taking a view of one
// simply returns a copy of it
template<class Iterator, class Sentinel>
__AGENCY_ANNOTATION
iterator_range<Iterator,Sentinel> all(iterator_range<Iterator,Sentinel> view)
{
  return view;
}
// Creates an iterator_range viewing all of rng's elements.
__agency_exec_check_disable__
template<class Range>
__AGENCY_ANNOTATION
iterator_range<range_iterator_t<Range>, range_sentinel_t<Range>>
make_iterator_range(Range&& rng)
{
  using view_type = iterator_range<range_iterator_t<Range>, range_sentinel_t<Range>>;
  return view_type(rng.begin(), rng.end());
}
// Creates a view of the given range with its first n elements omitted.
template<class Range>
__AGENCY_ANNOTATION
iterator_range<range_iterator_t<Range>, range_sentinel_t<Range>>
drop(Range&& rng, range_difference_t<Range> n)
{
  auto view = make_iterator_range(rng);
  view.drop(n);
  return view;
}
} // end experimental
} // end agency
<file_sep># Building and Running Example Programs
Each example program is built from a single source file. To build an example program by hand, compile a source file with a C++11 or better compiler. For example, the following command builds the `hello_world.cpp` source file from the `examples` directory:
$ clang -I.. -std=c++11 -lstdc++ -pthread hello_world.cpp
Example programs which require special compiler features, such as language extensions, are organized into subdirectories. For example, the `/cuda` subdirectory contains example programs which require a C++ compiler supporting CUDA language extensions.
CUDA C++ source (`.cu` files) should be built with the NVIDIA compiler (`nvcc`). Include the `--expt-extended-lambda` option:
$ nvcc -I.. -std=c++11 --expt-extended-lambda cuda/saxpy.cu
## Automated Builds
To build the test programs automatically, run the following command from this directory:
$ scons
To accelerate the build process, run the following command to run 8 jobs in parallel:
$ scons -j8
To build *and* run the example programs, specify `run_examples` as a command line argument:
$ scons -j8 run_examples
To build all tests underneath a particular subdirectory, run `scons` with the path to the subdirectory of interest as a command line argument.
For example, the following command builds all of the test programs underneath the `cuda` subdirectory:
$ scons cuda
Likewise, the following command will build *and* run the test programs underneath the `cuda` subdirectory:
$ scons cuda/run_examples
# Build System Structure
The top-level directory named 'examples' contains an `SConstruct` and `SConscript` file. `SConstruct` contains definitions of common functionality used by the rest of the build system. `SConscript` describes what targets the build process should build.
After setting up a SCons build environment, the `SConstruct` sets up a hierarchical build by invoking the top-level `SConscript` files in the root directory.
The top-level `SConscript` file calls the `RecursivelyCreateProgramsAndUnitTestAliases()` method to recursively traverse the directory tree and create a program and unit test from each source file.
<file_sep>#include <iostream>
#include <type_traits>
#include <vector>
#include <cassert>
#include <agency/future.hpp>
#include <agency/execution/executor/customization_points.hpp>
#include "../test_executors.hpp"
// bulk_then_execute() with an int predecessor: every agent should observe the
// predecessor's value and combine it with its element of the shared argument.
template<class Executor>
void test_with_non_void_predecessor(Executor exec)
{
  using index_type = agency::executor_index_t<Executor>;

  auto predecessor_future = agency::make_ready_future<int>(exec, 7);

  size_t n = 10;

  auto fut = agency::bulk_then_execute(exec,
    [](index_type idx, int& predecessor, std::vector<int>& results, std::vector<int>& shared_arg)
    {
      results[idx] = predecessor + shared_arg[idx];
    },
    n,
    predecessor_future,
    [n]{ return std::vector<int>(n); },     // factory for the collected results
    [n]{ return std::vector<int>(n, 13); }  // factory for the shared argument
  );

  // every agent computed 7 + 13
  assert(fut.get() == std::vector<int>(10, 7 + 13));
}
// bulk_then_execute() with a void predecessor: the continuation receives no
// predecessor parameter and each agent simply copies its shared element.
template<class Executor>
void test_with_void_predecessor(Executor exec)
{
  using index_type = agency::executor_index_t<Executor>;

  auto predecessor_future = agency::make_ready_future<void>(exec);

  size_t n = 10;

  auto fut = agency::bulk_then_execute(exec,
    [](index_type idx, std::vector<int>& results, std::vector<int>& shared_arg)
    {
      results[idx] = shared_arg[idx];
    },
    n,
    predecessor_future,
    [n]{ return std::vector<int>(n); },     // factory for the collected results
    [n]{ return std::vector<int>(n, 13); }  // factory for the shared argument
  );

  // every agent copied the shared value 13
  assert(fut.get() == std::vector<int>(10, 13));
}
int main()
{
  // exercise both predecessor flavors against every kind of test executor
  test_with_non_void_predecessor(bulk_synchronous_executor());
  test_with_non_void_predecessor(bulk_asynchronous_executor());
  test_with_non_void_predecessor(bulk_continuation_executor());
  test_with_non_void_predecessor(not_a_bulk_synchronous_executor());
  test_with_non_void_predecessor(not_a_bulk_asynchronous_executor());
  test_with_non_void_predecessor(not_a_bulk_continuation_executor());
  test_with_non_void_predecessor(complete_bulk_executor());

  test_with_void_predecessor(bulk_synchronous_executor());
  test_with_void_predecessor(bulk_asynchronous_executor());
  test_with_void_predecessor(bulk_continuation_executor());
  test_with_void_predecessor(not_a_bulk_synchronous_executor());
  test_with_void_predecessor(not_a_bulk_asynchronous_executor());
  test_with_void_predecessor(not_a_bulk_continuation_executor());
  test_with_void_predecessor(complete_bulk_executor());

  std::cout << "OK" << std::endl;

  return 0;
}
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/tuple.hpp>
#include <agency/detail/tuple/arithmetic_tuple_facade.hpp>
#include <agency/detail/operator_traits.hpp>
#include <agency/container/array.hpp>
#include <agency/coordinate/detail/named_array.hpp>
#include <type_traits>
#include <initializer_list>
#include <cassert>
namespace agency
{
namespace detail
{
// in general, point's base class is array<T,Rank> but low-rank points get named elements
template<class T, size_t Rank>
struct point_base
{
  using type = array<T,Rank>;
};

// specializations for ranks 1-4 select named_array, which presumably exposes
// named element accessors in addition to indexing -- see named_array.hpp
template<class T>
struct point_base<T,1>
{
  using type = named_array<T,1>;
};

template<class T>
struct point_base<T,2>
{
  using type = named_array<T,2>;
};

template<class T>
struct point_base<T,3>
{
  using type = named_array<T,3>;
};

template<class T>
struct point_base<T,4>
{
  using type = named_array<T,4>;
};

// convenience alias for the base class selected above
template<class T, size_t Rank>
using point_base_t = typename point_base<T,Rank>::type;
} // end detail
// T is any type with operators +, +=, -, -=, *, *=, /, /=, <
//
// point is a Rank-dimensional coordinate whose storage comes from
// point_base_t (array or named_array) and whose arithmetic operators come
// from arithmetic_tuple_facade.
template<class T, size_t Rank>
class point : public agency::detail::point_base_t<T,Rank>,
              public agency::detail::arithmetic_tuple_facade<point<T,Rank>>
{
  static_assert(agency::detail::has_arithmetic_operators<T>::value, "T must have arithmetic operators.");

  using super_t = detail::point_base_t<T,Rank>;

  public:
    using typename super_t::value_type;
    using typename super_t::reference;
    using typename super_t::size_type;
    using typename super_t::pointer;
    using typename super_t::const_pointer;

    __AGENCY_ANNOTATION
    point() = default;

    __AGENCY_ANNOTATION
    point(const point &) = default;

    // elementwise constructor: exactly one convertible argument per coordinate
    template<class... OtherT,
             __AGENCY_REQUIRES(
               detail::conjunction<
                 std::is_convertible<OtherT,value_type>...
               >::value &&
               sizeof...(OtherT) == Rank
             )>
    __AGENCY_ANNOTATION
    point(OtherT... args)
      : super_t{{static_cast<value_type>(args)...}}
    {
    }

    // this constructor is included to allow us to pass curly-braced lists through
    // interfaces which eventually get unpacked into points
    // for example, in expressions like this:
    //
    //     auto policy = agency::par2d({0,0}, {5,5});
    //
    // XXX trying to initialize a point from an initializer_list of the wrong size
    //     should be a compile-time error
    //     the problem is that l.size() can't always be used in static_assert
    template<class OtherT,
             __AGENCY_REQUIRES(
               std::is_convertible<OtherT,value_type>::value
             )>
    __AGENCY_ANNOTATION
    point(std::initializer_list<OtherT> l)
    {
      // l.size() needs to equal Rank
      assert(l.size() == Rank);

      auto src = l.begin();
      for(auto dst = super_t::begin(); dst != super_t::end(); ++src, ++dst)
      {
        *dst = *src;
      }
    }

    // converting constructor: copies coordinates from a point of another element type
    // XXX should fully parameterize this
    template<class OtherT,
             __AGENCY_REQUIRES(
               std::is_convertible<OtherT,value_type>::value
             )>
    __AGENCY_ANNOTATION
    point(const point<OtherT,Rank>& other)
    {
      detail::arithmetic_tuple_facade<point>::copy(other);
    }

    // fills the point with a constant value
    // restricted to Rank > 1, which avoids ambiguity with the elementwise constructor
    template<class OtherT,
             __AGENCY_REQUIRES(
               (Rank > 1) &&
               std::is_convertible<OtherT,value_type>::value
             )>
    __AGENCY_ANNOTATION
    explicit point(OtherT val)
    {
      detail::arithmetic_tuple_facade<point>::fill(val);
    }

    // implicit conversion to a pointer to the underlying storage
    // XXX this should be eliminated
    __AGENCY_ANNOTATION
    operator pointer ()
    {
      return super_t::data();
    }

    // XXX this should be eliminated
    __AGENCY_ANNOTATION
    operator const_pointer () const
    {
      return super_t::data();
    }
};
// Tuple-like element access for point: get<i>(p) returns p's i-th coordinate.
template<size_t i, class T, size_t Rank>
__AGENCY_ANNOTATION
T& get(point<T,Rank>& p)
{
  return p[i];
}

template<size_t i, class T, size_t Rank>
__AGENCY_ANNOTATION
const T& get(const point<T,Rank>& p)
{
  return p[i];
}

// rvalue overload: moves the coordinate out of the expiring point
template<size_t i, class T, size_t Rank>
__AGENCY_ANNOTATION
T&& get(point<T,Rank>&& p)
{
  return std::move(agency::get<i>(p));
}
// scalar multiply
// XXX fix return type -- it should be point<common_type,Rank>
//
// Enabled only when T1 is an arithmetic scalar and T1 * T2 is well-formed;
// broadcasts val into a point and multiplies elementwise.
template<class T1, class T2, size_t Rank>
__AGENCY_ANNOTATION
typename std::enable_if<
  (std::is_arithmetic<T1>::value && agency::detail::has_operator_multiplies<T1,T2>::value),
  point<T2,Rank>
>::type
operator*(T1 val, const point<T2,Rank>& p)
{
  using result_type = point<T2, Rank>;
  return result_type(val) * p;
}
// convenience aliases: <type>N names a point of N elements of that type

// integer points
using int0 = point<int,0>;
using int1 = point<int,1>;
using int2 = point<int,2>;
using int3 = point<int,3>;
using int4 = point<int,4>;
using int5 = point<int,5>;
using int6 = point<int,6>;
using int7 = point<int,7>;
using int8 = point<int,8>;
using int9 = point<int,9>;
using int10 = point<int,10>;

// unsigned integer points
using uint0 = point<unsigned int,0>;
using uint1 = point<unsigned int,1>;
using uint2 = point<unsigned int,2>;
using uint3 = point<unsigned int,3>;
using uint4 = point<unsigned int,4>;
using uint5 = point<unsigned int,5>;
using uint6 = point<unsigned int,6>;
using uint7 = point<unsigned int,7>;
using uint8 = point<unsigned int,8>;
using uint9 = point<unsigned int,9>;
using uint10 = point<unsigned int,10>;

// size_t points
using size0 = point<size_t,0>;
using size1 = point<size_t,1>;
using size2 = point<size_t,2>;
using size3 = point<size_t,3>;
using size4 = point<size_t,4>;
using size5 = point<size_t,5>;
using size6 = point<size_t,6>;
using size7 = point<size_t,7>;
using size8 = point<size_t,8>;
using size9 = point<size_t,9>;
using size10 = point<size_t,10>;

// single-precision floating point points
using float0 = point<float,0>;
using float1 = point<float,1>;
using float2 = point<float,2>;
using float3 = point<float,3>;
using float4 = point<float,4>;
using float5 = point<float,5>;
using float6 = point<float,6>;
using float7 = point<float,7>;
using float8 = point<float,8>;
using float9 = point<float,9>;
using float10 = point<float,10>;

// double-precision floating point points
using double0 = point<double,0>;
using double1 = point<double,1>;
using double2 = point<double,2>;
using double3 = point<double,3>;
using double4 = point<double,4>;
using double5 = point<double,5>;
using double6 = point<double,6>;
using double7 = point<double,7>;
using double8 = point<double,8>;
using double9 = point<double,9>;
using double10 = point<double,10>;
} // end agency
// specialize Tuple-like interface for agency::point
namespace std
{
// a point of Rank elements behaves as a tuple of Rank Ts
template<class T, size_t Rank>
class tuple_size<agency::point<T,Rank>> : public std::integral_constant<std::size_t, Rank> {};

// every element of a point has the same type, T
template<size_t I, class T, size_t Rank>
struct tuple_element<I,agency::point<T,Rank>>
{
  using type = T;
};
} // end std
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/experimental/variant.hpp>
#include <agency/detail/type_traits.hpp>
#include <agency/memory/allocator/detail/allocator_traits.hpp>
namespace agency
{
// variant_allocator is an allocator whose state is one of several alternative
// allocator types; every allocator operation is dispatched via visitation to
// whichever alternative is currently active. The value_type is taken from the
// first alternative, Alloc.
template<class Alloc, class... Allocs>
class variant_allocator
{
  private:
    using variant_type = agency::experimental::variant<Alloc, Allocs...>;

  public:
    using value_type = typename std::allocator_traits<Alloc>::value_type;

    // rebinds every alternative allocator to allocate T
    template<class T>
    struct rebind
    {
      using other = variant_allocator<
        typename std::allocator_traits<Alloc>::template rebind_alloc<T>,
        typename std::allocator_traits<Allocs>::template rebind_alloc<T>...
      >;
    };

    __AGENCY_ANNOTATION
    variant_allocator() = default;

    __AGENCY_ANNOTATION
    variant_allocator(const variant_allocator&) = default;

  public:
    // this constructor converts from another allocator, when possible
    // (i.e., when OtherAlloc can construct one of the variant's alternatives)
    template<class OtherAlloc,
             __AGENCY_REQUIRES(
               std::is_constructible<variant_type, const OtherAlloc&>::value
             )>
    __AGENCY_ANNOTATION
    variant_allocator(const OtherAlloc& alloc)
      : variant_(alloc)
    {}

  private:
    // other instantiations need access to variant_ in the converting constructor below
    template<class, class...>
    friend class variant_allocator;

    // visitor which maps the active alternative of another variant_allocator
    // onto the positionally-corresponding alternative of this one
    template<class OtherAlloc, class... OtherAllocs>
    struct converting_constructor_visitor
    {
      template<class A>
      __AGENCY_ANNOTATION
      variant_type operator()(const A& alloc) const
      {
        // lookup the index of A in <OtherAlloc, OtherAllocs...>
        constexpr std::size_t index = agency::experimental::detail::variant_detail::find_type<A, OtherAlloc, OtherAllocs...>::value;

        // lookup the type of the corresponding allocator in our variant
        using target_allocator_type = agency::experimental::variant_alternative_t<index, variant_type>;

        return static_cast<target_allocator_type>(alloc);
      }
    };

  public:
    // this constructor converts from another variant_allocator
    // whose alternatives are pairwise convertible to ours
    template<class OtherAlloc, class... OtherAllocs,
             __AGENCY_REQUIRES(sizeof...(Allocs) == sizeof...(OtherAllocs)),
             __AGENCY_REQUIRES(
               detail::conjunction<
                 std::is_constructible<Alloc,const OtherAlloc&>,
                 std::is_constructible<Allocs,const OtherAllocs&>...
               >::value
             )>
    __AGENCY_ANNOTATION
    variant_allocator(const variant_allocator<OtherAlloc,OtherAllocs...>& other)
      : variant_(experimental::visit(converting_constructor_visitor<OtherAlloc,OtherAllocs...>{}, other.variant_))
    {}

  private:
    // dispatches allocate() to the active alternative
    struct allocate_visitor
    {
      std::size_t n;

      template<class A>
      __AGENCY_ANNOTATION
      value_type* operator()(A& alloc) const
      {
        return detail::allocator_traits<A>::allocate(alloc, n);
      }
    };

  public:
    // allocates storage for n objects using the active alternative
    __AGENCY_ANNOTATION
    value_type* allocate(std::size_t n)
    {
      return experimental::visit(allocate_visitor{n}, variant_);
    }

  private:
    // dispatches deallocate() to the active alternative
    struct deallocate_visitor
    {
      value_type* ptr;
      std::size_t n;

      template<class A>
      __AGENCY_ANNOTATION
      void operator()(A& alloc) const
      {
        detail::allocator_traits<A>::deallocate(alloc, ptr, n);
      }
    };

  public:
    // deallocates storage previously obtained from allocate()
    __AGENCY_ANNOTATION
    void deallocate(value_type* ptr, std::size_t n)
    {
      return experimental::visit(deallocate_visitor{ptr,n}, variant_);
    }

// construct() uses a generic lambda as its visitor, so it is only
// available when the compiler supports them
#if __cpp_generic_lambdas
    // constructs a T at ptr via the active alternative
    template<class T, class... Args>
    __AGENCY_ANNOTATION
    void construct(T* ptr, Args&&... args)
    {
      return experimental::visit([&](auto& alloc)
      {
        using allocator_type = decltype(alloc);
        return detail::allocator_traits<allocator_type>::construct(alloc, ptr, std::forward<Args>(args)...);
      },
      variant_);
    }
#endif

  private:
    // dispatches destroy() to the active alternative
    template<class T>
    struct destroy_visitor
    {
      T* ptr;

      template<class A>
      __AGENCY_ANNOTATION
      void operator()(A& alloc) const
      {
        detail::allocator_traits<A>::destroy(alloc, ptr);
      }
    };

  public:
    // destroys the object at ptr via the active alternative
    template<class T>
    __AGENCY_ANNOTATION
    void destroy(T* ptr)
    {
      experimental::visit(destroy_visitor<T>{ptr}, variant_);
    }

  private:
    // dispatches max_size() to the active alternative
    struct max_size_visitor
    {
      template<class A>
      __AGENCY_ANNOTATION
      std::size_t operator()(const A& alloc) const
      {
        return detail::allocator_traits<A>::max_size(alloc);
      }
    };

  public:
    // the largest allocation the active alternative supports
    __AGENCY_ANNOTATION
    std::size_t max_size() const
    {
      return experimental::visit(max_size_visitor{}, variant_);
    }

    // equality compares both the active alternative and its value
    __AGENCY_ANNOTATION
    bool operator==(const variant_allocator& other) const
    {
      return variant_ == other.variant_;
    }

    __AGENCY_ANNOTATION
    bool operator!=(const variant_allocator& other) const
    {
      return variant_ != other.variant_;
    }

  private:
    variant_type variant_;
};
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/execution/execution_agent.hpp>
#include <agency/execution/execution_policy/execution_policy_traits.hpp>
#include <agency/execution/executor/sequenced_executor.hpp>
namespace agency
{
namespace detail
{
// simple_sequenced_policy is a simplified version of sequenced_execution_policy.
// The reason it exists is to avoid circular dependencies created between some fancy
// executor types and basic_execution_policy.
//
// Unlike sequenced_execution_policy, simple_sequenced_policy does not inherit from
// basic_execution_policy, and so does not inherit those circular dependencies.
//
// The functionality from sequenced_execution_policy missing from simple_sequenced_policy is .on() and operator().
// Fortunately, the envisioned use cases for simple_sequenced_policy do not require that functionality.
class simple_sequenced_policy
{
  public:
    // the type of execution agent this policy creates
    using execution_agent_type = sequenced_agent;
    // the type of executor used to create agents
    using executor_type = sequenced_executor;
    // the agents' parameterization (e.g., their index domain)
    using param_type = typename execution_agent_traits<execution_agent_type>::param_type;

    __agency_exec_check_disable__
    __AGENCY_ANNOTATION
    simple_sequenced_policy() = default;

    // constructs a policy from an agent parameterization and an optional executor
    __agency_exec_check_disable__
    __AGENCY_ANNOTATION
    simple_sequenced_policy(const param_type& param, const executor_type& executor = executor_type{})
      : param_(param),
        executor_(executor)
    {}

    // returns this policy's parameterization
    __AGENCY_ANNOTATION
    const param_type& param() const
    {
      return param_;
    }

    // returns this policy's associated executor
    // note: executor_ is mutable so that a mutable reference can be returned
    // from this const member function
    __AGENCY_ANNOTATION
    executor_type& executor() const
    {
      return executor_;
    }

  private:
    param_type param_;
    mutable executor_type executor_;
};
} // end detail
} // end agency
<file_sep>#pragma once
#include <exception>
#include <stdexcept>
#include <cstdio>
#include <agency/cuda/detail/feature_test.hpp>
#include <thrust/system_error.h>
#include <thrust/system/cuda/error.h>
namespace agency
{
namespace cuda
{
namespace detail
{
// Immediately halts execution: traps on the device (exceptions and
// std::terminate are unavailable there), calls std::terminate on the host.
__host__ __device__
inline void terminate()
{
#ifdef __CUDA_ARCH__
  asm("trap;");
#else
  std::terminate();
#endif
}

// Prints message and then halts execution.
__host__ __device__
inline void terminate_with_message(const char* message)
{
  printf("%s\n", message);

  terminate();
}
// prints a description of CUDA error e, prefixed by message
// degrades gracefully depending on which library facilities are available
// in the current compilation environment (possibly to a no-op)
__host__ __device__
inline void print_error_message(cudaError_t e, const char* message)
{
#if (__cuda_lib_has_printf && __cuda_lib_has_cudart)
printf("Error after %s: %s\n", message, cudaGetErrorString(e));
#elif __cuda_lib_has_printf
// no CUDA runtime available to translate e into a string; print message only
printf("Error: %s\n", message);
#endif
}
// prints a description of e, prefixed by message, but only when e
// indicates failure (cudaSuccess is zero, so it tests as false)
__host__ __device__
inline void print_error_message_if(cudaError_t e, const char* message)
{
  if(!e) return; // success: nothing to report

  agency::cuda::detail::print_error_message(e, message);
}
// if e indicates failure, prints a description of the error and terminates;
// on success (e == cudaSuccess, i.e. zero) this is a no-op
__host__ __device__
inline void terminate_on_error(cudaError_t e, const char* message)
{
  if(!e)
  {
    return;
  }

  agency::cuda::detail::print_error_message(e, message);
  terminate();
}
// if e indicates failure: on the host, throws a thrust::system_error
// describing e; in device code (where exceptions are unavailable),
// prints the error and terminates instead
inline __host__ __device__
void throw_on_error(cudaError_t e, const char* message)
{
if(e)
{
#ifndef __CUDA_ARCH__
throw thrust::system_error(e, thrust::cuda_category(), message);
#else
agency::cuda::detail::print_error_message(e, message);
terminate();
#endif
}
}
// throws std::runtime_error(message) on the host;
// in device code, prints the message and terminates instead
inline __host__ __device__
void throw_runtime_error(const char* message)
{
#ifndef __CUDA_ARCH__
throw std::runtime_error(message);
#else
detail::terminate_with_message(message);
#endif
}
} // end detail
} // cuda
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/memory/allocator/detail/allocator_traits.hpp>
#include <agency/detail/utility.hpp>
#include <agency/detail/iterator.hpp>
#include <agency/detail/algorithm.hpp>
#include <agency/memory/allocator.hpp>
#include <agency/memory/detail/storage.hpp>
#include <memory>
#include <initializer_list>
namespace agency
{
namespace detail
{
// reports a length error: throws std::length_error(what_arg) on the host;
// in device code (no exceptions), prints the message and asserts instead
__AGENCY_ANNOTATION
inline void throw_length_error(const char* what_arg)
{
#ifdef __CUDA_ARCH__
printf("length_error: %s\n", what_arg);
assert(0);
#else
throw std::length_error(what_arg);
#endif
}
// reports an out-of-range error: throws std::out_of_range(what_arg) on the
// host; in device code (no exceptions), prints the message and asserts instead
__AGENCY_ANNOTATION
inline void throw_out_of_range(const char* what_arg)
{
#ifdef __CUDA_ARCH__
printf("out_of_range: %s\n", what_arg);
assert(0);
#else
throw std::out_of_range(what_arg);
#endif
}
} // end detail
// vector is an allocator-aware, contiguous, dynamically-sized container in the
// style of std::vector. In addition to the usual members, most bulk operations
// are overloaded to accept an ExecutionPolicy used to parallelize element
// construction, copying, and destruction.
//
// The allocated buffer is owned by storage_; end_ points one past the last
// constructed element, so size() == end_ - storage_.data() and
// capacity() == storage_.size().
template<class T, class Allocator = allocator<T>>
class vector
{
  private:
    using storage_type = detail::storage<T,Allocator>;

  public:
    using allocator_type = Allocator;
    using value_type = typename detail::allocator_traits<allocator_type>::value_type;
    using size_type = typename detail::allocator_traits<allocator_type>::size_type;
    using difference_type = typename detail::allocator_traits<allocator_type>::difference_type;
    using reference = value_type&;
    using const_reference = const value_type&;
    using pointer = typename detail::allocator_traits<allocator_type>::pointer;
    using const_pointer = typename detail::allocator_traits<allocator_type>::const_pointer;
    using iterator = pointer;
    using const_iterator = const_pointer;
    using reverse_iterator = detail::reverse_iterator<iterator>;
    using const_reverse_iterator = detail::reverse_iterator<const_iterator>;

    // constructs an empty vector with a default-constructed allocator
    __agency_exec_check_disable__
    __AGENCY_ANNOTATION
    vector() : vector(Allocator()) {}

    // constructs an empty vector using alloc
    __AGENCY_ANNOTATION
    explicit vector(const Allocator& alloc)
      : storage_(alloc), end_(begin())
    {}

    // constructs a vector of count copies of value
    __AGENCY_ANNOTATION
    vector(size_type count, const T& value, const Allocator& alloc = Allocator())
      : vector(sequenced_execution_policy(), count, value, alloc)
    {}

    // as above, but constructs the elements using policy
    template<class ExecutionPolicy, __AGENCY_REQUIRES(is_execution_policy<typename std::decay<ExecutionPolicy>::type>::value)>
    __AGENCY_ANNOTATION
    vector(ExecutionPolicy&& policy, size_type count, const T& value, const Allocator& alloc = Allocator())
      : vector(std::forward<ExecutionPolicy>(policy), detail::constant_iterator<T>(value,0), detail::constant_iterator<T>(value,count), alloc)
    {}

    // constructs a vector of count value-initialized elements
    __AGENCY_ANNOTATION
    explicit vector(size_type count, const Allocator& alloc = Allocator())
      : vector(sequenced_execution_policy(), count, alloc)
    {}

    // as above, but constructs the elements using policy
    template<class ExecutionPolicy, __AGENCY_REQUIRES(is_execution_policy<typename std::decay<ExecutionPolicy>::type>::value)>
    __AGENCY_ANNOTATION
    vector(ExecutionPolicy&& policy, size_type count, const Allocator& alloc = Allocator())
      : vector(std::forward<ExecutionPolicy>(policy), count, T(), alloc)
    {}

    // constructs a vector containing a copy of the range [first, last)
    template<class InputIterator,
             __AGENCY_REQUIRES(
               std::is_convertible<
                 typename std::iterator_traits<InputIterator>::iterator_category,
                 std::input_iterator_tag
               >::value
             )>
    __AGENCY_ANNOTATION
    vector(InputIterator first, InputIterator last, const Allocator& alloc = Allocator())
      : vector(sequenced_execution_policy(), first, last, alloc)
    {}

    // this is the most fundamental constructor: the other copying constructors
    // lower onto it; it inserts [first, last) into an initially empty vector
    template<class ExecutionPolicy,
             class InputIterator,
             __AGENCY_REQUIRES(
               std::is_convertible<
                 typename std::iterator_traits<InputIterator>::iterator_category,
                 std::input_iterator_tag
               >::value
             )>
    __AGENCY_ANNOTATION
    vector(ExecutionPolicy&& policy, InputIterator first, InputIterator last, const Allocator& alloc = Allocator())
      : storage_(alloc), // initialize the storage to empty
        end_(begin())    // initialize end_ to begin()
    {
      insert(std::forward<ExecutionPolicy>(policy), end(), first, last);
    }

    // copy constructor
    __agency_exec_check_disable__
    __AGENCY_ANNOTATION
    vector(const vector& other)
      : vector(sequenced_execution_policy(), other, other.get_allocator())
    {}

    // copy constructor with policy
    __agency_exec_check_disable__
    template<class ExecutionPolicy>
    __AGENCY_ANNOTATION
    vector(ExecutionPolicy&& policy, const vector& other)
      : vector(std::forward<ExecutionPolicy>(policy), other, other.get_allocator())
    {}

    // extended copy constructor taking an allocator
    __AGENCY_ANNOTATION
    vector(const vector& other, const Allocator& alloc)
      : vector(sequenced_execution_policy(), other.begin(), other.end(), alloc)
    {}

    // extended copy constructor taking a policy and an allocator
    template<class ExecutionPolicy>
    __AGENCY_ANNOTATION
    vector(ExecutionPolicy&& policy, const vector& other, const Allocator& alloc)
      : vector(std::forward<ExecutionPolicy>(policy), other.begin(), other.end(), alloc)
    {}

    // move constructor: steals other's storage in O(1)
    __AGENCY_ANNOTATION
    vector(vector&& other)
      : storage_(std::move(other.storage_)),
        end_(other.end_)
    {
      // leave the other vector in a valid state
      other.end_ = other.begin();
    }

    // extended move constructor taking an allocator
    // NOTE(review): unlike the plain move constructor, other.end_ is not reset
    // here; whether that leaves other valid depends on detail::storage's
    // move-with-allocator semantics -- confirm
    __AGENCY_ANNOTATION
    vector(vector&& other, const Allocator& alloc)
      : storage_(std::move(other.storage_), alloc),
        end_(other.end_)
    {}

    // constructs a vector from an initializer list
    __AGENCY_ANNOTATION
    vector(std::initializer_list<T> init, const Allocator& alloc = Allocator())
      : vector(init.begin(), init.end(), alloc)
    {}

    // destroys the elements; the storage itself is released by storage_'s destructor
    __AGENCY_ANNOTATION
    ~vector()
    {
      clear();
    }

    // copy assignment
    __AGENCY_ANNOTATION
    vector& operator=(const vector& other)
    {
      assign(other.begin(), other.end());
      return *this;
    }

    // move assignment
    __AGENCY_ANNOTATION
    vector& operator=(vector&& other)
    {
      storage_ = std::move(other.storage_);
      detail::adl_swap(end_, other.end_);
      return *this;
    }

    // initializer-list assignment
    __AGENCY_ANNOTATION
    vector& operator=(std::initializer_list<T> ilist)
    {
      assign(ilist.begin(), ilist.end());
      return *this;
    }

    // replaces the contents with count copies of value
    __AGENCY_ANNOTATION
    void assign(size_type count, const T& value)
    {
      assign(sequenced_execution_policy(), count, value);
    }

    // as above, but uses policy
    template<class ExecutionPolicy, __AGENCY_REQUIRES(is_execution_policy<typename std::decay<ExecutionPolicy>::type>::value)>
    __AGENCY_ANNOTATION
    void assign(ExecutionPolicy&& policy, size_type count, const T& value)
    {
      assign(std::forward<ExecutionPolicy>(policy), detail::constant_iterator<T>(value,0), detail::constant_iterator<T>(value,count));
    }

  private:
    // assign() implementation for forward iterators: the range length is
    // known up front, so we can choose the cheapest of three strategies
    template<class ExecutionPolicy, class ForwardIterator>
    __AGENCY_ANNOTATION
    void assign(std::forward_iterator_tag, ExecutionPolicy&& policy, ForwardIterator first, ForwardIterator last)
    {
      size_type n = detail::distance(first, last);

      if(n > capacity())
      {
        // n is too large for capacity, swap with a new vector
        vector new_vector(policy, first, last);
        swap(new_vector);
      }
      else if(size() >= n)
      {
        // we can already accomodate the new range
        iterator old_end = end();
        end_ = detail::copy(policy, first, last, begin());

        // destroy the old elements
        detail::destroy(policy, storage_.allocator(), end(), old_end);
      }
      else
      {
        // size() < n <= capacity(): the range fits inside the allocated
        // storage but is longer than the constructed prefix
        // copy to already existing elements
        auto mid_and_end = detail::copy_n(policy, first, size(), begin());

        // construct new elements at the end
        // XXX we should really involve the allocator in construction here
        end_ = detail::uninitialized_copy_n(policy, agency::get<0>(mid_and_end), n - size(), end());
      }
    }

    // assign() implementation for single-pass input iterators: the range
    // length is unknown, so assign element-by-element and then trim or extend
    template<class ExecutionPolicy, class InputIterator>
    __AGENCY_ANNOTATION
    void assign(std::input_iterator_tag, ExecutionPolicy&& policy, InputIterator first, InputIterator last)
    {
      iterator current = begin();

      // assign to elements which already exist
      for(; first != last && current != end(); ++current, ++first)
      {
        *current = *first;
      }

      // either only the input was exhausted or both
      // the input and vector elements were exhausted
      if(first == last)
      {
        // if we exhausted the input, erase leftover elements
        erase(policy, current, end());
      }
      else
      {
        // insert the rest of the input at the end of the vector
        insert(policy, end(), first, last);
      }
    }

  public:
    // replaces the contents with a copy of the range [first, last)
    template<class InputIterator>
    __AGENCY_ANNOTATION
    void assign(InputIterator first, InputIterator last)
    {
      assign(sequenced_execution_policy(), first, last);
    }

    // as above, but uses policy; dispatches on the iterator category
    template<class ExecutionPolicy, class InputIterator,
             __AGENCY_REQUIRES(is_execution_policy<typename std::decay<ExecutionPolicy>::type>::value)>
    __AGENCY_ANNOTATION
    void assign(ExecutionPolicy&& policy, InputIterator first, InputIterator last)
    {
      assign(typename std::iterator_traits<InputIterator>::iterator_category(), std::forward<ExecutionPolicy>(policy), first, last);
    }

    // replaces the contents with the elements of ilist
    __AGENCY_ANNOTATION
    void assign(std::initializer_list<T> ilist)
    {
      assign(ilist.begin(), ilist.end());
    }

    // returns a copy of the allocator
    __agency_exec_check_disable__
    __AGENCY_ANNOTATION
    allocator_type get_allocator() const
    {
      return storage_.allocator();
    }

    // element access

    // bounds-checked element access; reports out_of_range when pos >= size()
    __AGENCY_ANNOTATION
    reference at(size_type pos)
    {
      if(pos >= size())
      {
        detail::throw_out_of_range("pos >= size() in vector::at()");
      }

      return operator[](pos);
    }

    __AGENCY_ANNOTATION
    const_reference at(size_type pos) const
    {
      if(pos >= size())
      {
        detail::throw_out_of_range("pos >= size() in vector::at()");
      }

      return operator[](pos);
    }

    // unchecked element access
    __AGENCY_ANNOTATION
    reference operator[](size_type pos)
    {
      return begin()[pos];
    }

    __AGENCY_ANNOTATION
    const_reference operator[](size_type pos) const
    {
      return begin()[pos];
    }

    // first element; undefined when empty()
    __AGENCY_ANNOTATION
    reference front()
    {
      return *begin();
    }

    __AGENCY_ANNOTATION
    const_reference front() const
    {
      return *begin();
    }

    // last element; undefined when empty()
    __AGENCY_ANNOTATION
    reference back()
    {
      return *(end()-1);
    }

    __AGENCY_ANNOTATION
    const_reference back() const
    {
      return *(end()-1);
    }

    // raw pointer to the underlying contiguous buffer
    __AGENCY_ANNOTATION
    T* data()
    {
      return storage_.data();
    }

    __AGENCY_ANNOTATION
    const T* data() const
    {
      return storage_.data();
    }

    // iterators

    __AGENCY_ANNOTATION
    iterator begin()
    {
      return storage_.data();
    }

    __AGENCY_ANNOTATION
    const_iterator begin() const
    {
      return cbegin();
    }

    __AGENCY_ANNOTATION
    const_iterator cbegin() const
    {
      return storage_.data();
    }

    __AGENCY_ANNOTATION
    iterator end()
    {
      return end_;
    }

    __AGENCY_ANNOTATION
    const_iterator end() const
    {
      return cend();
    }

    __AGENCY_ANNOTATION
    const_iterator cend() const
    {
      return end_;
    }

    __AGENCY_ANNOTATION
    reverse_iterator rbegin()
    {
      return reverse_iterator(end());
    }

    __AGENCY_ANNOTATION
    const_reverse_iterator rbegin() const
    {
      // fixed: construct a const_reverse_iterator (the original constructed
      // a mutable reverse_iterator from a const_iterator)
      return const_reverse_iterator(cend());
    }

    __AGENCY_ANNOTATION
    const_reverse_iterator crbegin() const
    {
      return rbegin();
    }

    __AGENCY_ANNOTATION
    reverse_iterator rend()
    {
      return reverse_iterator(begin());
    }

    __AGENCY_ANNOTATION
    const_reverse_iterator rend() const
    {
      // fixed: construct a const_reverse_iterator (see rbegin() const)
      return const_reverse_iterator(cbegin());
    }

    __AGENCY_ANNOTATION
    const_reverse_iterator crend() const
    {
      return rend();
    }

    // capacity

    __AGENCY_ANNOTATION
    bool empty() const
    {
      return cbegin() == cend();
    }

    // number of constructed elements
    __AGENCY_ANNOTATION
    size_type size() const
    {
      return end() - begin();
    }

    // largest theoretically supported size, as reported by the allocator
    __AGENCY_ANNOTATION
    size_type max_size() const
    {
      return detail::allocator_traits<allocator_type>::max_size(storage_.allocator());
    }

    // ensures capacity() >= new_capacity; may relocate elements
    __AGENCY_ANNOTATION
    void reserve(size_type new_capacity)
    {
      reserve(sequenced_execution_policy(), new_capacity);
    }

    // as above, but relocates elements using policy
    template<class ExecutionPolicy>
    __AGENCY_ANNOTATION
    void reserve(ExecutionPolicy&& policy, size_type new_capacity)
    {
      if(new_capacity > capacity())
      {
        if(new_capacity > max_size())
        {
          detail::throw_length_error("reserve(): new capacity exceeds max_size().");
        }

        // create a new storage object
        storage_type new_storage(new_capacity, storage_.allocator());

        // copy our elements into the new storage
        // XXX we should really involve the allocator in construction here
        end_ = detail::uninitialized_copy(std::forward<ExecutionPolicy>(policy), begin(), end(), new_storage.data());

        // swap out our storage
        storage_.swap(new_storage);
      }
    }

    // number of elements the currently allocated buffer can hold
    __AGENCY_ANNOTATION
    size_type capacity() const
    {
      return storage_.size();
    }

    // shrinks capacity() to size() by relocating into a tight buffer
    __AGENCY_ANNOTATION
    void shrink_to_fit()
    {
      shrink_to_fit(sequenced_execution_policy());
    }

    template<class ExecutionPolicy>
    __AGENCY_ANNOTATION
    void shrink_to_fit(ExecutionPolicy&& policy)
    {
      if(size() != capacity())
      {
        // move our elements into a temporary, and then swap this vector with the temporary
        vector temp(std::forward<ExecutionPolicy>(policy), detail::make_move_iterator(begin()), detail::make_move_iterator(end()), get_allocator());
        temp.swap(*this);
      }
    }

    // modifiers

    // destroys all elements; does not release the allocation
    __AGENCY_ANNOTATION
    void clear()
    {
      detail::destroy(storage_.allocator(), begin(), end());
      end_ = begin();
    }

    template<class ExecutionPolicy>
    __AGENCY_ANNOTATION
    void clear(ExecutionPolicy&& policy)
    {
      detail::destroy(std::forward<ExecutionPolicy>(policy), storage_.allocator(), begin(), end());
      end_ = begin();
    }

    // single element insert

    __AGENCY_ANNOTATION
    iterator insert(const_iterator position, const T& value)
    {
      return emplace(position, value);
    }

    __AGENCY_ANNOTATION
    iterator insert(const_iterator position, T&& value)
    {
      return emplace(position, std::move(value));
    }

    // fill insert

    template<class ExecutionPolicy>
    __AGENCY_ANNOTATION
    iterator insert(ExecutionPolicy&& policy, const_iterator position, size_type count, const T& value)
    {
      return insert(std::forward<ExecutionPolicy>(policy), position, detail::constant_iterator<T>(value,0), detail::constant_iterator<T>(value,count));
    }

    __AGENCY_ANNOTATION
    iterator insert(const_iterator position, size_type count, const T& value)
    {
      sequenced_execution_policy seq;
      return insert(seq, position, count, value);
    }

    // range insert for multi-pass (forward or stronger) iterators:
    // the length is computable up front, so all elements go in at once
    template<class ExecutionPolicy,
             class ForwardIterator,
             __AGENCY_REQUIRES(
               std::is_convertible<
                 typename std::iterator_traits<ForwardIterator>::iterator_category,
                 std::forward_iterator_tag
               >::value
             )
            >
    __AGENCY_ANNOTATION
    iterator insert(ExecutionPolicy&& policy, const_iterator position, ForwardIterator first, ForwardIterator last)
    {
      return emplace_n(std::forward<ExecutionPolicy>(policy), position, detail::distance(first, last), first);
    }

    // range insert

    template<class ForwardIterator,
             __AGENCY_REQUIRES(
               std::is_convertible<
                 typename std::iterator_traits<ForwardIterator>::iterator_category,
                 std::forward_iterator_tag
               >::value
             )
            >
    __AGENCY_ANNOTATION
    iterator insert(const_iterator position, ForwardIterator first, ForwardIterator last)
    {
      sequenced_execution_policy seq;
      return insert(seq, position, first, last);
    }

    // range insert for single-pass input iterators: insert one element at a
    // time, advancing past each inserted element so the input order is kept
    template<class InputIterator,
             __AGENCY_REQUIRES(
               !std::is_convertible<
                 typename std::iterator_traits<InputIterator>::iterator_category,
                 std::forward_iterator_tag
               >::value
             )
            >
    __AGENCY_ANNOTATION
    iterator insert(const_iterator position, InputIterator first, InputIterator last)
    {
      // remember the insertion offset, because insert() may reallocate and
      // invalidate position
      difference_type offset = position - cbegin();

      for(; first != last; ++first)
      {
        // fixed: advance past the element just inserted; reinserting at the
        // same position would reverse the input order
        position = insert(position, *first) + 1;
      }

      // return an iterator pointing to the first inserted element
      return begin() + offset;
    }

    __AGENCY_ANNOTATION
    iterator insert(const_iterator pos, std::initializer_list<T> ilist)
    {
      return insert(pos, ilist.begin(), ilist.end());
    }

    // constructs a single element in place at pos from args...
    template<class... Args>
    __AGENCY_ANNOTATION
    iterator emplace(const_iterator pos, Args&&... args)
    {
      sequenced_execution_policy seq;
      return emplace_n(seq, pos, 1, detail::make_forwarding_iterator<Args&&>(&args)...);
    }

    __AGENCY_ANNOTATION
    iterator erase(const_iterator pos)
    {
      return erase(pos, pos + 1);
    }

    __AGENCY_ANNOTATION
    iterator erase(const_iterator first, const_iterator last)
    {
      return erase(sequenced_execution_policy(), first, last);
    }

    // removes [first_, last_); shifts the tail down and destroys the leftovers
    template<class ExecutionPolicy>
    __AGENCY_ANNOTATION
    iterator erase(ExecutionPolicy&& policy, const_iterator first_, const_iterator last_)
    {
      // get mutable iterators
      iterator first = begin() + (first_ - begin());
      iterator last = begin() + (last_ - begin());

      // overlap copy the range [last,end()) to first
      iterator old_end = end();
      end_ = detail::overlapped_copy(policy, last, end(), first);

      // destroy everything after end()
      detail::destroy(policy, storage_.allocator(), end(), old_end);

      // return an iterator referring to one past the last erased element
      return first;
    }

    __AGENCY_ANNOTATION
    void push_back(const T& value)
    {
      emplace_back(value);
    }

    __AGENCY_ANNOTATION
    void push_back(T&& value)
    {
      emplace_back(std::move(value));
    }

    // constructs an element in place at the end and returns a reference to it
    template<class... Args>
    __AGENCY_ANNOTATION
    reference emplace_back(Args&&... args)
    {
      return *emplace(end(), std::forward<Args>(args)...);
    }

    __AGENCY_ANNOTATION
    void pop_back()
    {
      erase(end()-1, end());
    }

    // resizes to new_size, value-initializing any new elements
    __AGENCY_ANNOTATION
    void resize(size_type new_size)
    {
      resize(sequenced_execution_policy(), new_size);
    }

    template<class ExecutionPolicy, __AGENCY_REQUIRES(is_execution_policy<typename std::decay<ExecutionPolicy>::type>::value)>
    __AGENCY_ANNOTATION
    void resize(ExecutionPolicy&& policy, size_type new_size)
    {
      if(new_size < size())
      {
        detail::destroy(std::forward<ExecutionPolicy>(policy), storage_.allocator(), begin() + new_size, end());
        end_ = begin() + new_size;
      }
      else
      {
        // XXX this should probably call emplace_n(end(), new_size - size()) rather than call T() here
        // fixed: the original erroneously passed storage_.allocator() as an
        // extra argument to insert(); no such overload exists
        insert(std::forward<ExecutionPolicy>(policy), end(), new_size - size(), T());
      }
    }

    // resizes to new_size, filling any new elements with copies of value
    __AGENCY_ANNOTATION
    void resize(size_type new_size, const value_type& value)
    {
      return resize(sequenced_execution_policy(), new_size, value);
    }

    template<class ExecutionPolicy, __AGENCY_REQUIRES(is_execution_policy<typename std::decay<ExecutionPolicy>::type>::value)>
    __AGENCY_ANNOTATION
    void resize(ExecutionPolicy&& policy, size_type new_size, const value_type& value)
    {
      if(new_size < size())
      {
        detail::destroy(std::forward<ExecutionPolicy>(policy), storage_.allocator(), begin() + new_size, end());
        end_ = begin() + new_size;
      }
      else
      {
        insert(std::forward<ExecutionPolicy>(policy), end(), new_size - size(), value);
      }
    }

    // O(1) exchange of contents with other
    __AGENCY_ANNOTATION
    void swap(vector& other)
    {
      storage_.swap(other.storage_);
      detail::adl_swap(end_, other.end_);
    }

    // equality with any sized, iterable range
    __agency_exec_check_disable__
    template<class Range>
    __AGENCY_ANNOTATION
    bool operator==(const Range& rhs) const
    {
      return size() == rhs.size() && detail::equal(begin(), end(), rhs.begin());
    }

    __agency_exec_check_disable__
    template<class Range>
    __AGENCY_ANNOTATION
    bool operator!=(const Range& rhs) const
    {
      return size() != rhs.size() || !detail::equal(begin(), end(), rhs.begin());
    }

  private:
    // the workhorse behind emplace() and the forward-iterator inserts:
    // constructs count elements at position_ from the parallel iterators
    // iters..., displacing or relocating existing elements as needed;
    // returns an iterator to the first newly constructed element
    template<class ExecutionPolicy, class... InputIterator>
    __AGENCY_ANNOTATION
    iterator emplace_n(ExecutionPolicy&& policy, const_iterator position_, size_type count, InputIterator... iters)
    {
      // convert the const_iterator to an iterator
      iterator position = begin() + (position_ - cbegin());
      iterator result = position;

      if(count <= (capacity() - size()))
      {
        // we've got room for all of the new elements

        // how many existing elements will we displace?
        size_type num_displaced_elements = end() - position;
        iterator old_end = end();

        if(num_displaced_elements > count)
        {
          // move n displaced elements to newly constructed elements following the insertion
          // XXX we should really involve the allocator in construction here
          end_ = detail::uninitialized_move_n(policy, end() - count, count, end());

          // copy construct num_displaced_elements - n elements to existing elements
          // this copy overlaps
          size_type copy_length = (old_end - count) - position;

          // XXX we should really involve the allocator in construction here
          detail::overlapped_uninitialized_copy(policy, position, old_end - count, old_end - copy_length);

          // XXX we should destroy the elements [position, position + num_displaced_elements) before constructing new ones

          // construct new elements at insertion point
          // XXX we should really involve the allocator in construction here
          detail::construct_n(policy, position, count, iters...);
        }
        else
        {
          // move already existing, displaced elements to the end of the emplaced range, which is at position + count
          // XXX we should really involve the allocator in construction here
          end_ = detail::uninitialized_move_n(policy, position, num_displaced_elements, position + count);

          // XXX we should destroy the elements [position, position + num_displaced_elements) before placement newing new ones

          // construct new elements at the emplacement position
          // XXX we should really involve the allocator in construction here
          detail::construct_n(policy, position, count, iters...);
        }
      }
      else
      {
        size_type old_size = size();

        // do not exceed maximum storage
        // fixed: this check must happen before new_capacity is clamped to
        // max_size() below, otherwise the length error was unreachable
        if(count > max_size() - old_size)
        {
          detail::throw_length_error("insert(): insertion exceeds max_size().");
        }

        // compute the new capacity after the allocation
        size_type new_capacity = old_size + detail::max(old_size, count);

        // allocate exponentially larger new storage
        new_capacity = detail::max(new_capacity, size_type(2) * capacity());

        // do not exceed maximum storage
        new_capacity = detail::min(new_capacity, max_size());

        storage_type new_storage(new_capacity, storage_.allocator());

        // record how many constructors we invoke in the try block below
        iterator new_end = new_storage.data();

#ifndef __CUDA_ARCH__
        try
#endif
        {
          // move elements before the insertion to the beginning of the new storage
          // XXX we should really involve the allocator in construction here
          new_end = detail::uninitialized_move_n(policy, begin(), position - begin(), new_storage.data());

          result = new_end;

          // copy construct new elements
          // XXX we should really involve the allocator in construction here
          new_end = detail::construct_n(policy, new_end, count, iters...);

          // move elements after the insertion to the end of the new storage
          // XXX we should really involve the allocator in construction here
          new_end = detail::uninitialized_move_n(policy, position, end() - position, new_end);
        }
#ifndef __CUDA_ARCH__
        catch(...)
        {
          // something went wrong, so destroy as many new elements as were constructed
          detail::destroy(policy, new_storage.allocator(), new_storage.data(), new_end);

          // rethrow
          throw;
        }
#endif

        // record the vector's new state
        storage_.swap(new_storage);
        end_ = new_end;
      }

      return result;
    }

    // the allocation (owns the raw buffer and the allocator)
    storage_type storage_;

    // one past the last constructed element
    iterator end_;
};
// TODO
template<class T, class Allocator>
__AGENCY_ANNOTATION
bool operator<(const vector<T,Allocator>& lhs, const vector<T,Allocator>& rhs);
// TODO
template<class T, class Allocator>
__AGENCY_ANNOTATION
bool operator<=(const vector<T,Allocator>& lhs, const vector<T,Allocator>& rhs);
// TODO
template<class T, class Allocator>
__AGENCY_ANNOTATION
bool operator>(const vector<T,Allocator>& lhs, const vector<T,Allocator>& rhs);
// TODO
template<class T, class Allocator>
__AGENCY_ANNOTATION
bool operator>=(const vector<T,Allocator>& lhs, const vector<T,Allocator>& rhs);
// exchanges the contents of a and b in O(1) by swapping their storage
template<class T, class Allocator>
__AGENCY_ANNOTATION
void swap(vector<T,Allocator>& a, vector<T,Allocator>& b)
{
  // member swap is symmetric, so either receiver works
  b.swap(a);
}
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/coordinate/lattice.hpp>
#include <agency/detail/index_lexicographical_rank.hpp>
#include <agency/execution/execution_agent/execution_agent_traits.hpp>
#include <utility>
namespace agency
{
namespace detail
{
// basic_execution_agent is the common implementation of simple execution
// agent types: it records the agent's index and the lattice of indices
// (the domain) spanned by the agent's group.
template<class ExecutionCategory, class Index = size_t>
class basic_execution_agent
{
public:
using execution_category = ExecutionCategory;
using index_type = Index;
// returns this agent's index within its group's domain
__AGENCY_ANNOTATION
index_type index() const
{
return index_;
}
using domain_type = lattice<index_type>;
// returns the lattice of indices spanned by this agent's group
__AGENCY_ANNOTATION
const domain_type& domain() const
{
return domain_;
}
using size_type = decltype(std::declval<domain_type>().size());
// returns the number of agents in this agent's group
__AGENCY_ANNOTATION
size_type group_size() const
{
return domain().size();
}
// returns the (possibly multidimensional) shape of this agent's group
__AGENCY_ANNOTATION
auto group_shape() const
-> decltype(this->domain().shape())
{
return domain().shape();
}
// returns this agent's position in the lexicographical ordering of the group's indices
__AGENCY_ANNOTATION
size_type rank() const
{
return agency::detail::index_lexicographical_rank(index(), group_shape());
}
// returns true for exactly one agent of the group: the one with rank 0
__AGENCY_ANNOTATION
bool elect() const
{
return rank() == 0;
}
// param_type describes how to parameterize a group of these agents,
// i.e. the domain of indices the group spans
class param_type
{
public:
__AGENCY_ANNOTATION
param_type() = default;
__AGENCY_ANNOTATION
param_type(const param_type& other) = default;
// constructs a parameterization from an explicit domain
__AGENCY_ANNOTATION
param_type(const domain_type& d)
: domain_(d)
{}
// constructs a parameterization from the half-open index range [min, max)
__AGENCY_ANNOTATION
param_type(const index_type& min, const index_type& max)
: param_type(domain_type(min,max))
{}
__AGENCY_ANNOTATION
const domain_type& domain() const
{
return domain_;
}
private:
domain_type domain_;
};
// returns the domain described by p
__AGENCY_ANNOTATION
static domain_type domain(const param_type& p)
{
return p.domain();
}
protected:
// agents are created by execution_agent_traits (a friend), not directly by users
__agency_exec_check_disable__
__AGENCY_ANNOTATION
basic_execution_agent(const index_type& index, const param_type& param) : index_(index), domain_(param.domain()) {}
friend struct agency::execution_agent_traits<basic_execution_agent>;
private:
index_type index_;
domain_type domain_;
};
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/memory/detail/unique_ptr.hpp>
#include <stdexcept>
#include <cassert>
#include <utility>
#include <type_traits>
#include <memory>
namespace agency
{
namespace detail
{
// exception type reported when an empty unique_function is invoked,
// analogous to std::bad_function_call
class bad_function_call : public std::exception
{
public:
virtual const char* what() const noexcept
{
return "bad_function_call: unique_function has no target";
}
};
namespace unique_function_detail
{
// reports invocation of an empty unique_function: throws bad_function_call
// on the host; in device code (no exceptions), prints and asserts instead
__AGENCY_ANNOTATION
inline void throw_bad_function_call()
{
#ifdef __CUDA_ARCH__
printf("bad_function_call: unique_function has no target\n");
assert(0);
#else
throw bad_function_call();
#endif
}
} // end unique_function_detail
template<class>
class unique_function;
// unique_function is a move-only, allocator-aware analogue of std::function:
// it type-erases any callable with signature Result(Args...) behind a
// heap-allocated, self-deallocating holder owned via unique_ptr
template<class Result, class... Args>
class unique_function<Result(Args...)>
{
public:
using result_type = Result;
// constructs an empty unique_function (no target)
__AGENCY_ANNOTATION
unique_function() = default;
__AGENCY_ANNOTATION
unique_function(std::nullptr_t)
: f_ptr_(nullptr)
{}
__AGENCY_ANNOTATION
unique_function(unique_function&& other) = default;
// constructs a unique_function holding f, allocating its storage with default_allocator
// NOTE(review): this converting constructor is unconstrained; an attempted
// copy of a non-const unique_function lvalue would select it -- confirm
// that only move construction is ever required
template<class Function>
__AGENCY_ANNOTATION
unique_function(Function&& f)
: unique_function(std::allocator_arg, default_allocator<typename std::decay<Function>::type>{}, std::forward<Function>(f))
{}
// allocator-extended constructors (std::allocator_arg tag-dispatched)
template<class Alloc>
__AGENCY_ANNOTATION
unique_function(std::allocator_arg_t, const Alloc&, std::nullptr_t)
: unique_function(nullptr)
{}
template<class Alloc>
__AGENCY_ANNOTATION
unique_function(std::allocator_arg_t, const Alloc& alloc)
: unique_function(std::allocator_arg, alloc, nullptr)
{}
// allocator-extended move: the allocator is unused because ownership of the
// existing holder is simply transferred
template<class Alloc>
__AGENCY_ANNOTATION
unique_function(std::allocator_arg_t, const Alloc&, unique_function&& other)
: f_ptr_(std::move(other.f_ptr_))
{}
// allocates a holder for f using alloc and takes ownership of it
template<class Alloc, class Function>
__AGENCY_ANNOTATION
unique_function(std::allocator_arg_t, const Alloc& alloc, Function&& f)
: f_ptr_(allocate_function_pointer(alloc, std::forward<Function>(f)))
{}
__AGENCY_ANNOTATION
unique_function& operator=(unique_function&& other) = default;
// invokes the target; reports bad_function_call when there is no target
__AGENCY_ANNOTATION
Result operator()(Args... args) const
{
if(!*this)
{
unique_function_detail::throw_bad_function_call();
}
return (*f_ptr_)(args...);
}
// returns true if this unique_function has a target
__AGENCY_ANNOTATION
operator bool () const
{
return f_ptr_;
}
private:
// this is the abstract base class for a type
// which is both
// 1. callable like a function and
// 2. deallocates itself inside its destructor
struct callable_self_deallocator_base
{
using self_deallocate_function_type = void(*)(callable_self_deallocator_base*);
// stored by the concrete derived type; invoked from the destructor to
// release the derived object's own memory
self_deallocate_function_type self_deallocate_function;
template<class Function>
__AGENCY_ANNOTATION
callable_self_deallocator_base(Function callback)
: self_deallocate_function(callback)
{}
__AGENCY_ANNOTATION
virtual ~callable_self_deallocator_base()
{
self_deallocate_function(this);
}
__AGENCY_ANNOTATION
virtual Result operator()(Args... args) const = 0;
};
// the concrete holder for a callable of type Function, allocated via Alloc
template<class Function, class Alloc>
struct callable : callable_self_deallocator_base
{
using super_t = callable_self_deallocator_base;
using allocator_type = typename std::allocator_traits<Alloc>::template rebind_alloc<callable>;
// mutable so that the const call operator below can invoke a mutable callable
mutable Function f_;
__agency_exec_check_disable__
__AGENCY_ANNOTATION
~callable() = default;
__agency_exec_check_disable__
template<class OtherFunction,
class = typename std::enable_if<
std::is_constructible<Function,OtherFunction&&>::value
>::type>
__AGENCY_ANNOTATION
callable(const Alloc&, OtherFunction&& f)
: super_t(deallocate),
f_(std::forward<OtherFunction>(f))
{}
__agency_exec_check_disable__
__AGENCY_ANNOTATION
virtual Result operator()(Args... args) const
{
return f_(args...);
}
// the self-deallocation callback registered with the base class
__agency_exec_check_disable__
__AGENCY_ANNOTATION
static void deallocate(callable_self_deallocator_base* ptr)
{
// upcast to the right type of pointer
callable* self = static_cast<callable*>(ptr);
// XXX seems like creating a new allocator here is cheating
// we should use some member allocator, but it's not clear where to put it
allocator_type alloc_;
alloc_.deallocate(self, 1);
}
};
// this deleter calls the destructor of its argument but does not
// deallocate the ptr
// T will deallocate itself inside ~T()
struct self_deallocator_deleter
{
template<class T>
__AGENCY_ANNOTATION
void operator()(T* ptr) const
{
ptr->~T();
}
};
using function_pointer = detail::unique_ptr<callable_self_deallocator_base, self_deallocator_deleter>;
// allocates a concrete callable holder for f with alloc and wraps it in a
// unique_ptr whose deleter only destroys (the holder frees its own memory)
template<class Alloc, class Function>
__AGENCY_ANNOTATION
static function_pointer allocate_function_pointer(const Alloc& alloc, Function&& f)
{
using concrete_function_type = callable<typename std::decay<Function>::type, Alloc>;
return agency::detail::allocate_unique_with_deleter<concrete_function_type>(alloc, self_deallocator_deleter(), alloc, std::forward<Function>(f))
;
}
// minimal allocator used when no allocator is supplied by the caller;
// forwards allocation to std::allocator
template<class T>
struct default_allocator
{
using value_type = T;
__AGENCY_ANNOTATION
default_allocator() = default;
__AGENCY_ANNOTATION
default_allocator(const default_allocator&) = default;
template<class U>
__AGENCY_ANNOTATION
default_allocator(const default_allocator<U>&) {}
// XXX we have to implement this member function superfluously because
// agency::detail::allocate_unique calls it directly instead of using std::allocator_traits
template<class U, class... OtherArgs>
__AGENCY_ANNOTATION
void construct(U* ptr, OtherArgs&&... args)
{
::new(ptr) U(std::forward<OtherArgs>(args)...);
}
value_type* allocate(size_t n)
{
std::allocator<T> alloc;
return alloc.allocate(n);
}
void deallocate(value_type* ptr, std::size_t n)
{
std::allocator<value_type> alloc;
alloc.deallocate(ptr, n);
}
};
// owning pointer to the type-erased target; null when empty
function_pointer f_ptr_;
};
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/type_traits.hpp>
#include <agency/execution/executor/executor_traits/detail/member_future_or.hpp>
#include <future>
#include <type_traits>
#include <utility>
namespace agency
{
namespace detail
{
// SFINAE detector: its member ::type is std::true_type exactly when
// Executor has a member .async_execute(Function) whose return type is the
// executor's associated future (or std::future by default) of Function's result
template<class Executor, class Function>
struct has_async_execute_impl
{
using result_type = result_of_t<Function()>;
// the future type a conforming .async_execute() must return
using expected_future_type = member_future_or_t<Executor,result_type,std::future>;
// this overload participates only when the expression compiles AND
// returns exactly expected_future_type
template<class Executor1,
class ReturnType = decltype(
std::declval<Executor1>().async_execute(
std::declval<Function>()
)
),
class = typename std::enable_if<
std::is_same<ReturnType,expected_future_type>::value
>::type>
static std::true_type test(int);
// fallback selected when the overload above is ill-formed
template<class>
static std::false_type test(...);
using type = decltype(test<Executor>(0));
};
template<class Executor, class Function>
using has_async_execute = typename has_async_execute_impl<Executor, Function>::type;
// implementation of the is_asynchronous_executor trait: probes T with a
// representative nullary function object returning int
template<class T>
struct is_asynchronous_executor_impl
{
// types related to functions passed to .async_execute()
using result_type = int;
// the function we'll pass to .async_execute() to test
// XXX WAR nvcc 8.0 bug
//using test_function = std::function<result_type()>;
struct test_function
{
result_type operator()();
};
using type = has_async_execute<
T,
test_function
>;
};
} // end detail
template<class T>
using is_asynchronous_executor = typename detail::is_asynchronous_executor_impl<T>::type;
namespace detail
{
// a fake Concept to use with __AGENCY_REQUIRES
template<class T>
constexpr bool AsynchronousExecutor()
{
return is_asynchronous_executor<T>();
}
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/execution/executor/customization_points/bulk_then_execute.hpp>
#include <agency/execution/executor/executor_traits.hpp>
#include <agency/detail/factory.hpp>
#include <agency/detail/invoke.hpp>
#include <agency/detail/integer_sequence.hpp>
#include <agency/future.hpp>
namespace agency
{
namespace detail
{
namespace bulk_then_execute_without_shared_parameters_detail
{
// in general, the predecessor future's type is non-void: the wrapped functor
// receives (index, predecessor, result) followed by the shared parameters
// created by the executor, which are dropped before invoking f
template<class Function, class Predecessor>
struct ignore_shared_parameters_and_invoke
{
  // mutable so that operator(), which is const, can still pass f as a
  // non-const argument to invoke()
  mutable Function f;

  template<class Index, class Result, class... IgnoredArgs>
  __AGENCY_ANNOTATION
  void operator()(const Index& idx, Predecessor& predecessor, Result& result, IgnoredArgs&...) const
  {
    // forward everything except the ignored shared parameters to f
    agency::detail::invoke(f, idx, predecessor, result);
  }
};

// this specialization handles the void predecessor future case:
// there is no predecessor value to forward to f
template<class Function>
struct ignore_shared_parameters_and_invoke<Function,void>
{
  mutable Function f;

  template<class Index, class Result, class... IgnoredArgs>
  __AGENCY_ANNOTATION
  void operator()(const Index& idx, Result& result, IgnoredArgs&...) const
  {
    agency::detail::invoke(f, idx, result);
  }
};
// factory whose product is passed as a shared parameter and then ignored;
// one is instantiated per level of the executor's execution hierarchy
template<size_t>
using factory_returning_ignored_result = agency::detail::unit_factory;

// Implementation: expands one unit factory per hierarchy level (Indices...)
// and forwards everything to the bulk_then_execute() customization point.
template<size_t... Indices, class E, class Function, class Future, class ResultFactory>
__AGENCY_ANNOTATION
executor_future_t<E, result_of_t<ResultFactory()>>
bulk_then_execute_without_shared_parameters_impl(index_sequence<Indices...>,
E& exec, Function f, executor_shape_t<E> shape, Future& predecessor, ResultFactory result_factory)
{
  using predecessor_type = future_value_t<Future>;

  // wrap f so that the ignored shared parameters are dropped before invoking it
  bulk_then_execute_without_shared_parameters_detail::ignore_shared_parameters_and_invoke<Function,predecessor_type> execute_me{f};

  return agency::bulk_then_execute(exec,
    execute_me,     // the functor to execute
    shape,          // the number of agents to create
    predecessor,    // the predecessor future
    result_factory, // the factory to create the result
    factory_returning_ignored_result<Indices>()... // pass a factory for each level of execution hierarchy. the results of these factories will be ignored
  );
}
} // end bulk_then_execute_without_shared_parameters_detail
// Like bulk_then_execute(), but without requiring the caller to supply
// shared-parameter factories: unit factories are substituted for every level
// of exec's execution hierarchy and their products are ignored.
template<class E, class Function, class Future, class ResultFactory,
         __AGENCY_REQUIRES(BulkExecutor<E>())
        >
__AGENCY_ANNOTATION
executor_future_t<E, result_of_t<ResultFactory()>>
bulk_then_execute_without_shared_parameters(E& exec, Function f, executor_shape_t<E> shape, Future& predecessor, ResultFactory result_factory)
{
  return bulk_then_execute_without_shared_parameters_detail::bulk_then_execute_without_shared_parameters_impl(
    // generate one compile-time index per level of E's execution hierarchy
    detail::make_index_sequence<executor_execution_depth<E>::value>(),
    exec,
    f,
    shape,
    predecessor,
    result_factory
  );
}
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/execution/execution_policy.hpp>
#include <agency/detail/algorithm/uninitialized_copy_n.hpp>
#include <agency/detail/iterator/move_iterator.hpp>
#include <utility>
namespace agency
{
namespace detail
{
// Moves [first, first + n) into the uninitialized range beginning at result
// using the given execution policy; implemented as an uninitialized copy
// from move iterators. Returns the end of the destination range.
template<class ExecutionPolicy, class Iterator1, class Size, class Iterator2,
         __AGENCY_REQUIRES(
           is_execution_policy<typename std::decay<ExecutionPolicy>::type>::value
         )>
__AGENCY_ANNOTATION
Iterator2 uninitialized_move_n(ExecutionPolicy&& policy, Iterator1 first, Size n, Iterator2 result)
{
  return detail::uninitialized_copy_n(std::forward<ExecutionPolicy>(policy), detail::make_move_iterator(first), n, result);
}

// Overload without a policy: executes sequentially.
template<class Iterator1, class Size, class Iterator2>
__AGENCY_ANNOTATION
Iterator2 uninitialized_move_n(Iterator1 first, Size n, Iterator2 result)
{
  // pass this instead of agency::seq to work around the prohibition on
  // taking the address of a global constexpr object (i.e., agency::seq) from a CUDA __device__ function
  sequenced_execution_policy seq;

  return detail::uninitialized_move_n(seq, first, n, result);
}
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/execution/execution_policy/basic_execution_policy.hpp>
#include <agency/cuda/execution/execution_policy/parallel_execution_policy.hpp>
#include <agency/cuda/execution/execution_policy/concurrent_execution_policy.hpp>
#include <agency/cuda/execution/executor/grid_executor.hpp>
#include <agency/cuda/execution/executor/multidevice_executor.hpp>
#include <agency/cuda/execution/executor/scoped_executor.hpp>
#include <agency/cuda/device.hpp>
namespace agency
{
namespace cuda
{
// XXX consider making this a global object like the other execution policies
//
// Returns an execution policy describing a CUDA grid: num_blocks parallel
// blocks, each a group of num_threads concurrent threads.
inline auto grid(size_t num_blocks, size_t num_threads) ->
  decltype(
    par(num_blocks, con(num_threads))
  )
{
  return par(num_blocks, con(num_threads));
}
// NOTE: the stray ";" which previously followed this function body was an
// empty declaration — removed for consistency with the 2D overload below and
// to silence pedantic extra-semicolon warnings

// XXX consider making this a unique type instead of an alias
// the kind of execution agent grid() creates: a parallel group of concurrent agents
using grid_agent = parallel_group<concurrent_agent>;
// XXX need to figure out how to make this par(con) select grid_executor_2d automatically
// XXX consider making this a global object like the other execution policies
//
// Two-dimensional overload: returns an execution policy describing a CUDA
// grid with 2D grid and block dimensions, bound to a grid_executor_2d.
inline auto grid(size2 grid_dim, size2 block_dim) ->
  decltype(
    par2d(grid_dim, con2d(block_dim)).on(grid_executor_2d())
  )
{
  return par2d(grid_dim, con2d(block_dim)).on(grid_executor_2d());
}

// XXX consider making this a unique type instead of an alias
// the kind of execution agent the 2D grid() creates
using grid_agent_2d = parallel_group_2d<concurrent_agent_2d>;
// this overload is called on e.g. par(con).on(device(0))
// XXX this function needs to account for the dimensionality of GridPolicy's agents
//
// Rebinds a scoped parallel-over-concurrent policy to a grid_executor
// targeting the single given device; the policy's parameterization is kept.
template<class GridPolicy,
         __AGENCY_REQUIRES(
           agency::detail::policy_is_scoped_parallel_concurrent<GridPolicy>::value
         )>
__AGENCY_ANNOTATION
basic_execution_policy<cuda::grid_agent, cuda::grid_executor>
replace_executor(const GridPolicy& policy, device_id device)
{
  // create a grid_executor
  cuda::grid_executor exec(device);

  // keep the policy's parameters, swap in the new executor
  return basic_execution_policy<cuda::grid_agent, cuda::grid_executor>(policy.param(), exec);
}

// this overload is called on e.g. par(con).on(all_devices())
// XXX this function needs to account for the dimensionality of GridPolicy's agents
//
// Rebinds a scoped parallel-over-concurrent policy to a
// spanning_grid_executor covering every device in the given range.
template<class GridPolicy,
         class Range,
         __AGENCY_REQUIRES(
           detail::is_range_of_device_id<Range>::value and
           agency::detail::policy_is_scoped_parallel_concurrent<GridPolicy>::value
         )>
basic_execution_policy<cuda::grid_agent, cuda::spanning_grid_executor>
replace_executor(const GridPolicy& policy, const Range& devices)
{
  // turn the range of device_id into a vector of grid_executors
  auto grid_executors = agency::cuda::detail::devices_to_grid_executors(devices);
  spanning_grid_executor exec(grid_executors);

  return basic_execution_policy<cuda::grid_agent, cuda::spanning_grid_executor>(policy.param(), exec);
}
} // end cuda
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/execution/execution_policy.hpp>
#include <agency/detail/algorithm/uninitialized_copy_n.hpp>
#include <agency/detail/iterator/distance.hpp>
#include <iterator>
namespace agency
{
namespace detail
{
// Copies [first, last) into the uninitialized range beginning at result
// using the given execution policy; forwards to uninitialized_copy_n with
// the iterator distance. Returns the end of the destination range.
template<class ExecutionPolicy, class ForwardIterator, class OutputIterator,
         __AGENCY_REQUIRES(
           is_execution_policy<typename std::decay<ExecutionPolicy>::type>::value
         )>
__AGENCY_ANNOTATION
OutputIterator uninitialized_copy(ExecutionPolicy&& policy, ForwardIterator first, ForwardIterator last, OutputIterator result)
{
  return agency::detail::uninitialized_copy_n(std::forward<ExecutionPolicy>(policy), first, agency::detail::distance(first,last), result);
}

// Overload without a policy: executes sequentially.
template<class ForwardIterator, class OutputIterator>
__AGENCY_ANNOTATION
OutputIterator uninitialized_copy(ForwardIterator first, ForwardIterator last, OutputIterator result)
{
  // pass this instead of agency::seq to work around the prohibition on
  // taking the address of a global constexpr object (i.e., agency::seq) from a CUDA __device__ function
  agency::sequenced_execution_policy seq;

  return agency::detail::uninitialized_copy(seq, first, last, result);
}
} // end detail
} // end agency
<file_sep>#include <agency/agency.hpp>
#include <cassert>
#include <algorithm>
#include <iostream>
#include <mutex>
#include <thread>
// Smoke tests for executor_array<concurrent_executor>:
//   1. bulk_async_execute() with outer and inner shared parameters
//   2. bulk_then_execute() consuming a predecessor future
//   3. bulk_async_execute() through a flattened_executor adaptor
// Each section asserts that every agent produced the expected value.
//
// Improvement over the original: manual mut.lock()/mut.unlock() pairs are
// replaced with scoped std::lock_guard, which releases the mutex even if the
// guarded statement throws.
int main()
{
  using namespace agency;

  using inner_executor_type = concurrent_executor;

  {
    // test bulk_async_execute()
    using executor_type = executor_array<inner_executor_type>;
    using shape_type = executor_shape_t<executor_type>;
    using index_type = executor_index_t<executor_type>;
    using allocator_type = executor_allocator_t<executor_type, int>;
    using int_container = bulk_result<int, shape_type, allocator_type>;

    executor_type exec(2);
    shape_type shape = exec.make_shape(3,5);

    std::mutex mut;
    auto f = agency::bulk_async_execute(exec, [=,&mut](const index_type& idx, int_container& results, int& outer_shared, int& inner_shared)
    {
      {
        // serialize console output across agents
        std::lock_guard<std::mutex> lock(mut);
        std::cout << "Hello from agent " << idx << std::endl;
      }

      results[idx] = 13 + outer_shared + inner_shared;
    },
    shape,
    [=]{ return int_container(shape); }, // result factory
    []{ return 7; },                     // outer shared parameter factory
    []{ return 42; }                     // inner shared parameter factory
    );

    // sleep for a bit so the asynchronous work overlaps the main thread
    {
      std::lock_guard<std::mutex> lock(mut);
      std::cout << "main thread sleeping for a bit..." << std::endl;
    }

    std::this_thread::sleep_for(std::chrono::seconds(1));

    {
      std::lock_guard<std::mutex> lock(mut);
      std::cout << "main thread woke up" << std::endl;
    }

    auto results = f.get();

    // one result per agent; every agent adds both shared parameters to 13
    assert(results.size() == agency::detail::index_space_size(shape));
    assert(std::all_of(results.begin(), results.end(), [](int x){ return x == 13 + 7 + 42; }));
  }

  {
    // test bulk_then_execute()
    using executor_type = executor_array<inner_executor_type>;
    using shape_type = executor_shape_t<executor_type>;
    using index_type = executor_index_t<executor_type>;
    using allocator_type = executor_allocator_t<executor_type, int>;
    using int_container = bulk_result<int, shape_type, allocator_type>;

    executor_type exec(2);

    // the predecessor future whose value the continuation consumes
    auto past = agency::make_ready_future<int>(exec,1);

    shape_type shape = exec.make_shape(3,5);

    std::mutex mut;
    auto f = agency::bulk_then_execute(exec, [=,&mut](const index_type& idx, int& past, int_container& results, int& outer_shared, int& inner_shared)
    {
      {
        std::lock_guard<std::mutex> lock(mut);
        std::cout << "Hello from agent " << idx << std::endl;
      }

      results[idx] = 13 + past + outer_shared + inner_shared;
    },
    shape,
    past,
    [=]{ return int_container(shape); },
    []{ return 7; },
    []{ return 42; });

    // sleep for a bit
    {
      std::lock_guard<std::mutex> lock(mut);
      std::cout << "main thread sleeping for a bit..." << std::endl;
    }

    std::this_thread::sleep_for(std::chrono::seconds(1));

    {
      std::lock_guard<std::mutex> lock(mut);
      std::cout << "main thread woke up" << std::endl;
    }

    auto results = f.get();

    // 13 + predecessor (1) + outer (7) + inner (42) from every agent
    assert(results.size() == agency::detail::index_space_size(shape));
    assert(std::all_of(results.begin(), results.end(), [](int x){ return x == 13 + 1 + 7 + 42; }));
  }

  {
    // test flattened executor_array: adapts the two-level executor_array
    // into a single-level executor with a flat shape
    using executor_array_type = executor_array<inner_executor_type>;
    using executor_type = flattened_executor<executor_array_type>;
    using shape_type = executor_shape_t<executor_type>;
    using index_type = executor_index_t<executor_type>;
    using allocator_type = executor_allocator_t<executor_type, int>;
    using int_container = bulk_result<int, shape_type, allocator_type>;

    executor_array_type exec_array(2);
    executor_type exec{exec_array};

    shape_type shape = 10;

    auto f = agency::bulk_async_execute(exec, [](const index_type& idx, int_container& results, int& shared)
    {
      results[idx] = 13 + shared;
    },
    shape,
    [=]{ return int_container(shape); },
    []{ return 7; }
    );

    auto results = f.get();

    assert(results.size() == agency::detail::index_space_size(shape));
    assert(std::all_of(results.begin(), results.end(), [](int x){ return x == 13 + 7; }));
  }

  std::cout << "OK" << std::endl;

  return 0;
}
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/execution/execution_agent/detail/basic_execution_agent.hpp>
#include <agency/execution/execution_categories.hpp>
#include <agency/coordinate/point.hpp>
namespace agency
{
// an execution agent with unsequenced semantics and the default (1D) index
using unsequenced_agent = detail::basic_execution_agent<unsequenced_execution_tag>;
using unsequenced_agent_1d = unsequenced_agent;

// the 2D-indexed variant
using unsequenced_agent_2d = detail::basic_execution_agent<unsequenced_execution_tag, size2>;
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/cuda/detail/feature_test.hpp>
#include <cassert>
#include <cstddef>
#if __has_include(<cooperative_groups.h>)
#include <cooperative_groups.h>
#endif
namespace agency
{
namespace cuda
{
namespace detail
{
// A barrier spanning all threads of a CUDA grid.
//
// On the host the barrier only records the expected participant count and
// arriving at it is a runtime error; on the device it synchronizes the whole
// grid via cooperative groups when available.
class grid_barrier
{
  public:
    // count is the number of participating threads; it is only stored when
    // compiling for the host — device code queries the live grid instead
    __AGENCY_ANNOTATION
    inline grid_barrier(std::size_t count)
#ifndef __CUDA_ARCH__
      : count_(count)
#endif
    {}

    // non-movable
    __AGENCY_ANNOTATION
    grid_barrier(grid_barrier&&) = delete;

    // use a deduced template non-type parameter here, mirroring
    // arrive_and_wait() below, so that instantiation is deferred until
    // .count() is actually called
    template<bool deduced_false = false>
    __AGENCY_ANNOTATION
    inline int count() const
    {
#ifndef __CUDA_ARCH__
      // when called from __host__ code, just return count_
      return count_;
#else
      // when called from __device__ code...
#if(__cuda_lib_has_cooperative_groups)
      // if we have cooperative_groups, use it
      return cooperative_groups::this_grid().size();
#else
      // if we don't have cooperative_groups, calculate the size of the grid ourself
      return (blockDim.x * blockDim.y * blockDim.z) * (gridDim.x * gridDim.y * gridDim.z);
#endif
#endif
    }

    // use a deduced template non-type parameter here
    // so that the static_assert below is raised only if .arrive_and_wait() is called
    template<bool deduced_false = false>
    __AGENCY_ANNOTATION
    inline void arrive_and_wait()
    {
#ifndef __CUDA_ARCH__
      // when called from __host__ code, create a runtime error
      assert(0);
#else
      // when called from __device__ code...
#if(__cuda_lib_has_cooperative_groups)
      // grid-wide synchronization through cooperative groups
      return cooperative_groups::this_grid().sync();
#else
      // if we haven't compiled the code correctly, create a compile-time error
      static_assert(deduced_false, "Use of grid_barrier::arrive_and_wait() in __device__ code requires CUDA version >= 9, __CUDA_ARCH__ >= 600 and relocatable device code.");
#endif
#endif
    }

  private:
#ifndef __CUDA_ARCH__
    // NOTE(review): the constructor takes std::size_t but this member is int,
    // so very large counts would narrow — confirm whether that's intended
    int count_;
#endif
};
} // end detail
} // end cuda
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/tuple.hpp>
#include <agency/detail/tuple/arithmetic_tuple_facade.hpp>
#include <agency/detail/type_traits.hpp>
#include <agency/detail/make_tuple_if_not_scoped.hpp>
namespace agency
{
namespace detail
{
// index_tuple can't just be an alias for a particular kind of tuple
// because it also requires arithmetic operators
// A tuple of indices which additionally supports element-wise arithmetic
// (provided by arithmetic_tuple_facade); constructors are inherited from
// agency::tuple.
template<class... Indices>
class index_tuple :
  public agency::tuple<Indices...>,
  public arithmetic_tuple_facade<index_tuple<Indices...>>
{
  public:
    using agency::tuple<Indices...>::tuple;
};

// converts a std::tuple of indices into an index_tuple
template<class... Indices>
__AGENCY_ANNOTATION
index_tuple<Indices...> make_index_tuple(const std::tuple<Indices...>& indices)
{
  return index_tuple<Indices...>(indices);
}

// builds an index_tuple directly from a pack of (decayed) index values
template<class... Args>
__AGENCY_ANNOTATION
index_tuple<decay_t<Args>...> make_index_tuple(Args&&... args)
{
  return index_tuple<decay_t<Args>...>(std::forward<Args>(args)...);
}

// function-object wrapper for make_index_tuple, usable with tuple_cat_apply
struct index_tuple_maker
{
  template<class... Args>
  __AGENCY_ANNOTATION
  auto operator()(Args&&... args) const
    -> decltype(
         make_index_tuple(std::forward<Args>(args)...)
       )
  {
    return make_index_tuple(std::forward<Args>(args)...);
  }
};
// Computes the index type of a two-level (scoped) execution hierarchy: each
// index is wrapped in a tuple only when its execution category is itself
// scoped (see make_tuple_if_not_scoped), and the two results are
// concatenated into one flat index_tuple.
template<class ExecutionCategory1,
         class ExecutionCategory2,
         class Index1,
         class Index2>
struct scoped_index
{
  using type = decltype(
    __tu::tuple_cat_apply(
      detail::index_tuple_maker{},
      detail::make_tuple_if_not_scoped<ExecutionCategory1>(std::declval<Index1>()),
      detail::make_tuple_if_not_scoped<ExecutionCategory2>(std::declval<Index2>())
    )
  );
};

// convenience alias for scoped_index<...>::type
template<class ExecutionCategory1,
         class ExecutionCategory2,
         class Index1,
         class Index2>
using scoped_index_t = typename scoped_index<
  ExecutionCategory1,
  ExecutionCategory2,
  Index1,
  Index2
>::type;

// joins an outer and an inner index into a single scoped index value,
// mirroring the type computation above
template<class ExecutionCategory1,
         class ExecutionCategory2,
         class Index1,
         class Index2>
__AGENCY_ANNOTATION
scoped_index_t<ExecutionCategory1,ExecutionCategory2,Index1,Index2> make_scoped_index(const Index1& outer_idx, const Index2& inner_idx)
{
  return __tu::tuple_cat_apply(
    detail::index_tuple_maker{},
    detail::make_tuple_if_not_scoped<ExecutionCategory1>(outer_idx),
    detail::make_tuple_if_not_scoped<ExecutionCategory2>(inner_idx)
  );
}
} // end detail
} // end agency
namespace std
{
// make index_tuple participate in the std tuple protocol by forwarding
// tuple_size and tuple_element to the underlying agency::tuple
template<class... Indices>
class tuple_size<agency::detail::index_tuple<Indices...>> : public std::tuple_size<agency::tuple<Indices...>> {};

template<size_t i, class... Indices>
class tuple_element<i,agency::detail::index_tuple<Indices...>> : public std::tuple_element<i,agency::tuple<Indices...>> {};
} // end namespace std
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/execution/execution_agent/detail/basic_execution_agent.hpp>
#include <agency/detail/concurrency/barrier.hpp>
#include <agency/detail/concurrency/in_place_barrier.hpp>
#include <agency/container/array.hpp>
#include <agency/experimental/optional.hpp>
#include <agency/experimental/variant.hpp>
#include <type_traits>
namespace agency
{
namespace detail
{
// An execution agent with concurrent semantics: agents of a group may block
// one another, synchronize via wait(), and exchange a value via broadcast().
//
//   Index          - the agent's index type
//   Barrier        - the barrier implementation used for group synchronization
//   MemoryResource - source of temporary storage shared across the group
template<class Index, class Barrier, class MemoryResource>
class basic_concurrent_agent : public detail::basic_execution_agent<concurrent_execution_tag, Index>
{
  private:
    using super_t = detail::basic_execution_agent<concurrent_execution_tag, Index>;

    // wrap the user's Barrier type with in_place_barrier so that we may use
    // in_place_type_t with its constructor
    using barrier_type = in_place_barrier<Barrier>;

    // the broadcast channel is sized to hold one pointer; values no larger
    // than this are passed through it directly, larger values are passed via
    // dynamically allocated storage
    static constexpr size_t broadcast_channel_size = sizeof(void*);
    using broadcast_channel_type = agency::array<char, broadcast_channel_size>;

    // this function destroys *ptr if ptr is not null and then makes the entire group wait
    // only one agent should pass a non-nullptr to this function
    // the entire group should be convergent before calling this function
    template<class T>
    __AGENCY_ANNOTATION
    typename std::enable_if<
      std::is_trivially_destructible<T>::value
    >::type
      destroy_and_wait_if(T*)
    {
      // no op: T has a trivial destructor, so there's no need to do anything
      // including synchronize
    }

    // this function destroys *ptr if ptr is not null and then makes the entire group wait
    // only one agent should pass a non-nullptr to this function
    // the entire group should be convergent before calling this function
    template<class T>
    __AGENCY_ANNOTATION
    typename std::enable_if<
      !std::is_trivially_destructible<T>::value
    >::type
      destroy_and_wait_if(T* ptr)
    {
      // first destroy the object
      if(ptr)
      {
        ptr->~T();
      }

      // synchronize the group
      wait();
    }

    // this overload of broadcast_impl() is for small T
    template<class T,
             __AGENCY_REQUIRES(
               (sizeof(T) <= broadcast_channel_size)
             )>
    __AGENCY_ANNOTATION
    T broadcast_impl(const experimental::optional<T>& value)
    {
      // value is small enough to fit inside broadcast_channel_, so we can
      // send it through directly without needing to dynamically allocate storage

      // reinterpret the broadcast channel into the right kind of type
      T* shared_temporary_object = reinterpret_cast<T*>(shared_param_.broadcast_channel_.data());

      // the thread with the value copies it into a shared temporary
      if(value)
      {
        // copy construct the shared temporary
        ::new(shared_temporary_object) T(*value);
      }

      // all agents wait for the object to be ready
      wait();

      // copy the shared temporary to a local variable
      T result = *shared_temporary_object;

      // all agents wait for all other agents to finish copying the shared temporary
      wait();

      // destroy the object and all agents wait for the broadcast to become ready again
      destroy_and_wait_if(value ? shared_temporary_object : nullptr);

      return result;
    }

    // this overload of broadcast_impl() is for large T
    template<class T,
             __AGENCY_REQUIRES(
               (sizeof(T) > broadcast_channel_size)
             )>
    __AGENCY_ANNOTATION
    T broadcast_impl(const experimental::optional<T>& value)
    {
      // value is too large to fit through broadcast_channel_, so
      // we need to dynamically allocate storage

      // reinterpret the broadcast channel into a pointer
      static_assert(sizeof(broadcast_channel_type) >= sizeof(T*), "broadcast channel is too small to accomodate T*");
      T* shared_temporary_object = reinterpret_cast<T*>(&shared_param_.broadcast_channel_);

      // NOTE(review): the broadcasting agent reassigns its *local*
      // shared_temporary_object to freshly allocated storage below, but that
      // pointer does not appear to be written back into broadcast_channel_,
      // while non-broadcasting agents still read through the channel address
      // — confirm how the allocated pointer is meant to reach other agents
      if(value)
      {
        // dynamically allocate the shared temporary object
        shared_temporary_object = memory_resource().allocate<alignof(T)>(sizeof(T));

        // copy construct the shared temporary
        ::new(shared_temporary_object) T(*value);
      }

      // all agents wait for the object to be ready
      wait();

      // copy the shared temporary to a local variable
      T result = *shared_temporary_object;

      // all agents wait for other agents to finish copying the shared temporary
      wait();

      if(value)
      {
        // destroy the shared temporary
        shared_temporary_object->~T();

        // deallocate the temporary storage
        memory_resource().deallocate(shared_temporary_object);
      }

      // all agents wait for the broadcast channel and memory resource to become ready again
      wait();

      return result;
    }

  public:
    using param_type = typename super_t::param_type;
    using index_type = typename super_t::index_type;

    // blocks until every agent in the group has arrived at the barrier
    __AGENCY_ANNOTATION
    void wait() const
    {
      shared_param_.barrier_.arrive_and_wait();
    }

    // exactly one agent supplies an engaged optional; every agent (including
    // the supplier) receives a copy of its value
    template<class T>
    __AGENCY_ANNOTATION
    T broadcast(const experimental::optional<T>& value)
    {
      return broadcast_impl(value);
    }

    using memory_resource_type = MemoryResource;

    // the memory resource shared by the whole group
    __AGENCY_ANNOTATION
    memory_resource_type& memory_resource()
    {
      return shared_param_.memory_resource_;
    }

    // state shared by all agents of one group: broadcast channel, barrier,
    // and memory resource
    class shared_param_type
    {
      public:
        __AGENCY_ANNOTATION
        shared_param_type(const param_type& param)
          : barrier_(param.domain().size()),
            memory_resource_()
        {
          // note we specifically avoid default constructing broadcast_channel_
        }

        // constructs with an explicitly selected barrier alternative
        template<class OtherBarrier,
                 __AGENCY_REQUIRES(
                   std::is_constructible<barrier_type, experimental::in_place_type_t<OtherBarrier>, std::size_t>::value
                 )>
        __AGENCY_ANNOTATION
        shared_param_type(const param_type& param, experimental::in_place_type_t<OtherBarrier> which_barrier)
          : barrier_(which_barrier, param.domain().size()),
            memory_resource_()
        {
          // note we specifically avoid default constructing broadcast_channel_
        }

        // shared_param_type needs to be moveable, even if its member types aren't,
        // because shared_param_type objects will be returned from factory functions
        // at the moment, this requires moveability
        //
        // XXX we should be able to eliminate this move constructor in C++17
        // see wg21.link/P0135
        __AGENCY_ANNOTATION
        shared_param_type(shared_param_type&& other)
          : barrier_(other.barrier_.index(), other.barrier_.count()),
            memory_resource_()
        {}

      private:
        // broadcast_channel_ needs to be the first member to ensure proper alignment because we reinterpret it to arbitrary T*
        // XXX is there a more comprehensive way to ensure that this member falls on the right address?
        broadcast_channel_type broadcast_channel_;
        barrier_type barrier_;
        memory_resource_type memory_resource_;

        // the agent accesses these members directly
        friend basic_concurrent_agent;
    };

  private:
    // per-agent reference to the group's shared state
    shared_param_type& shared_param_;

  protected:
    // agents are only constructed by execution_agent_traits
    __AGENCY_ANNOTATION
    basic_concurrent_agent(const index_type& index, const param_type& param, shared_param_type& shared_param)
      : super_t(index, param),
        shared_param_(shared_param)
    {}

    // friend execution_agent_traits to give it access to the constructor
    friend struct agency::execution_agent_traits<basic_concurrent_agent>;
};
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/future.hpp>
#include <agency/execution/executor/executor_traits/executor_future.hpp>
#include <agency/execution/executor/executor_traits/is_executor.hpp>
#include <utility>
#include <type_traits>
namespace agency
{
namespace detail
{
// Detects whether Executor has a member function template
// .make_ready_future<T>(Args...).
template<class Executor, class T, class... Args>
struct has_make_ready_future_impl
{
  // SFINAE probe: well-formed only when the member function template exists
  template<
    class Executor2,
    typename = decltype(
      std::declval<Executor2&>().template make_ready_future<T>(
        std::declval<Args>()...
      )
    )
  >
  static std::true_type test(int);

  // fallback chosen when the probe above is ill-formed
  template<class>
  static std::false_type test(...);

  using type = decltype(test<Executor>(0));
};

// convenience alias: std::true_type or std::false_type
template<class Executor, class T, class... Args>
using has_make_ready_future = typename has_make_ready_future_impl<Executor,T,Args...>::type;
// this overload handles the case of executors which have the member function .make_ready_future()
__agency_exec_check_disable__
template<class T, class Executor, class... Args>
__AGENCY_ANNOTATION
executor_future_t<Executor,T>
make_ready_future_impl(std::true_type, Executor& exec, Args&&... args)
{
  // defer to the executor's own customization
  return exec.template make_ready_future<T>(std::forward<Args>(args)...);
} // end make_ready_future_impl()

// this overload handles the case of executors which do not have the member function .make_ready_future()
template<class T, class Executor, class... Args>
__AGENCY_ANNOTATION
executor_future_t<Executor,T>
make_ready_future_impl(std::false_type, Executor&, Args&&... args)
{
  // fall back to the generic future_traits implementation for the
  // executor's associated future type
  using future_type = executor_future_t<Executor,T>;
  return future_traits<future_type>::template make_ready<T>(std::forward<Args>(args)...);
} // end make_ready_future_impl()
} // end detail
// Customization point: creates a future associated with exec which is
// immediately ready with a T constructed from args.... Uses the executor's
// .make_ready_future<T>() member when present, otherwise future_traits.
template<class T, class E, class... Args,
         __AGENCY_REQUIRES(detail::Executor<E>())
        >
__AGENCY_ANNOTATION
executor_future_t<E,T> make_ready_future(E& exec, Args&&... args)
{
  // dispatch on whether E has the member function
  using check_for_member_function = detail::has_make_ready_future<
    E,
    T,
    Args&&...
  >;

  return detail::make_ready_future_impl<T>(check_for_member_function(), exec, std::forward<Args>(args)...);
} // end make_ready_future()
} // end agency
<file_sep>#pragma once
#include <agency/execution/execution_categories.hpp>
#include <agency/tuple.hpp>
#include <utility>
namespace agency
{
namespace detail
{
// non-scoped case: the tuple is merely a single-element wrapper, so return
// its only element
template<class ExecutionCategory, class Tuple>
__AGENCY_ANNOTATION
static auto unwrap_tuple_if_not_scoped_impl(ExecutionCategory, Tuple&& t)
  -> decltype(agency::get<0>(std::forward<Tuple>(t)))
{
  return agency::get<0>(std::forward<Tuple>(t));
}

// scoped case: the tuple genuinely carries one element per hierarchy level,
// so pass it through unchanged
template<class ExecutionCategory1, class ExecutionCategory2, class Tuple>
__AGENCY_ANNOTATION
static auto unwrap_tuple_if_not_scoped_impl(scoped_execution_tag<ExecutionCategory1,ExecutionCategory2>, Tuple&& t)
  -> decltype(std::forward<Tuple>(t))
{
  return std::forward<Tuple>(t);
}

// dispatches to one of the two overloads above based on ExecutionCategory
template<class ExecutionCategory, class Tuple>
__AGENCY_ANNOTATION
auto unwrap_tuple_if_not_scoped(Tuple&& t)
  -> decltype(
       unwrap_tuple_if_not_scoped_impl(ExecutionCategory(), std::forward<Tuple>(t))
     )
{
  return unwrap_tuple_if_not_scoped_impl(ExecutionCategory(), std::forward<Tuple>(t));
}
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/bulk_invoke.hpp>
#include <agency/execution/execution_policy/detail/simple_sequenced_policy.hpp>
#include <agency/detail/type_traits.hpp>
#include <agency/detail/iterator/iterator_traits.hpp>
#include <cassert>
#include <cstdio>
namespace agency
{
namespace detail
{
namespace construct_n_detail
{
// Bulk-invocable functor: agent i placement-constructs the i-th element of
// the destination range from the i-th elements of the source ranges.
struct construct_n_functor
{
  __agency_exec_check_disable__
  template<class Agent, class RandomAccessIterator, class... RandomAccessIterators>
  __AGENCY_ANNOTATION
  void operator()(Agent& self, RandomAccessIterator first, RandomAccessIterators... iters)
  {
    auto i = self.rank();

    // placement-new the value_type from the corresponding source elements
    ::new(static_cast<void*>(&first[i])) typename std::iterator_traits<RandomAccessIterator>::value_type(iters[i]...);
  }
};

// sink used to expand and discard a parameter pack
// (e.g. to increment a pack of iterators inside a loop header)
template<class... Args>
__AGENCY_ANNOTATION
int swallow(Args&&...)
{
  return 0;
}
} // end construct_n_detail
// this overload is for cases where we need not execute sequentially:
// 1. ExecutionPolicy is not sequenced AND
// 2. Iterators are random access
//
// Constructs n objects in the uninitialized range [first, first + n), one
// bulk agent per element; returns first + n.
template<class ExecutionPolicy, class RandomAccessIterator, class Size, class... RandomAccessIterators,
         __AGENCY_REQUIRES(
           is_execution_policy<typename std::decay<ExecutionPolicy>::type>::value
         ),
         __AGENCY_REQUIRES(
           !policy_is_sequenced<decay_t<ExecutionPolicy>>::value and
           iterators_are_random_access<RandomAccessIterator,RandomAccessIterators...>::value
         )>
__AGENCY_ANNOTATION
RandomAccessIterator construct_n(ExecutionPolicy&& policy, RandomAccessIterator first, Size n, RandomAccessIterators... iters)
{
  agency::bulk_invoke(policy(n), construct_n_detail::construct_n_functor(), first, iters...);

  return first + n;
}

// this overload is for cases where we must execute sequentially
// 1. ExecutionPolicy is sequenced OR
// 2. Iterators are not random access
__agency_exec_check_disable__
template<class ExecutionPolicy, class Iterator, class Size, class... Iterators,
         __AGENCY_REQUIRES(
           is_execution_policy<typename std::decay<ExecutionPolicy>::type>::value
         ),
         __AGENCY_REQUIRES(
           policy_is_sequenced<decay_t<ExecutionPolicy>>::value or
           !iterators_are_random_access<Iterator,Iterators...>::value
         )>
__AGENCY_ANNOTATION
Iterator construct_n(ExecutionPolicy&&, Iterator first, Size n, Iterators... iters)
{
  using value_type = typename std::iterator_traits<Iterator>::value_type;

  // advance every source iterator in lockstep with first;
  // swallow() expands the ++iters... pack inside the loop header
  for(Size i = 0; i < n; ++i, ++first, construct_n_detail::swallow(++iters...))
  {
    ::new(&*first) value_type(*iters...);
  }

  return first;
}
// XXX we introduce an Allocator parameter here if we can figure out how to support it
// internally, construct_n would call allocator_traits<Allocator>::construct(alloc, ...)
//
// Overload without a policy: constructs sequentially.
template<class Iterator, class Size, class... Iterators,
         __AGENCY_REQUIRES(
           // XXX we have no is_iterator, so just use the negation of the requirement used above
           !is_execution_policy<Iterator>::value
         )>
__AGENCY_ANNOTATION
Iterator construct_n(Iterator first, Size n, Iterators... iters)
{
  // use simple_sequenced_policy here to avoid circular dependencies
  // created by the use of sequenced_policy
  simple_sequenced_policy seq;

  return detail::construct_n(seq, first, n, iters...);
}
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/execution/executor/executor_traits.hpp>
#include <agency/detail/integer_sequence.hpp>
#include <agency/tuple.hpp>
#include <agency/execution/executor/detail/utility/bulk_async_execute_with_void_result.hpp>
#include <agency/execution/executor/detail/utility/bulk_async_execute_with_collected_result.hpp>
#include <agency/execution/executor/customization_points/future_cast.hpp>
#include <agency/detail/control_structures/executor_functions/bind_agent_local_parameters.hpp>
#include <agency/detail/control_structures/executor_functions/unpack_shared_parameters_from_executor_and_invoke.hpp>
#include <agency/detail/control_structures/executor_functions/bulk_invoke_executor.hpp>
#include <agency/detail/control_structures/executor_functions/result_factory.hpp>
#include <agency/detail/control_structures/scope_result.hpp>
#include <agency/detail/control_structures/decay_parameter.hpp>
#include <agency/detail/type_traits.hpp>
#include <type_traits>
namespace agency
{
namespace detail
{
// this overload handles the general case where the user function returns a normal result
template<class E, class Function, class ResultFactory, class Tuple, size_t... TupleIndices>
__AGENCY_ANNOTATION
executor_future_t<E, result_of_t<ResultFactory()>>
bulk_async_executor_impl(E& exec,
                         Function f,
                         ResultFactory result_factory,
                         executor_shape_t<E> shape,
                         Tuple&& shared_factory_tuple,
                         detail::index_sequence<TupleIndices...>)
{
  // expand the tuple of shared-parameter factories into individual arguments
  return detail::bulk_async_execute_with_collected_result(exec, f, shape, result_factory, agency::get<TupleIndices>(std::forward<Tuple>(shared_factory_tuple))...);
}

// this overload handles the special case where the user function returns a scope_result
// the overload above cannot handle this case because, unlike the above case,
// there is an intermediate future which must be converted to the right type of result future
template<class E, class Function, size_t scope, class T, class Tuple, size_t... TupleIndices>
__AGENCY_ANNOTATION
executor_future_t<E, typename detail::scope_result_container<scope,T,E>::result_type>
bulk_async_executor_impl(E& exec,
                         Function f,
                         construct<detail::scope_result_container<scope,T,E>, executor_shape_t<E>> result_factory,
                         executor_shape_t<E> shape,
                         Tuple&& shared_factory_tuple,
                         detail::index_sequence<TupleIndices...>)
{
  auto intermediate_future = detail::bulk_async_execute_with_collected_result(exec, f, shape, result_factory, agency::get<TupleIndices>(std::forward<Tuple>(shared_factory_tuple))...);

  using result_type = typename detail::scope_result_container<scope,T,E>::result_type;

  // cast the intermediate_future to result_type
  return agency::future_cast<result_type>(exec, intermediate_future);
}

// this overload handles the special case where the user function returns void:
// no result container is needed
template<class E, class Function, class Tuple, size_t... TupleIndices>
__AGENCY_ANNOTATION
executor_future_t<E,void>
bulk_async_executor_impl(E& exec,
                         Function f,
                         void_factory,
                         executor_shape_t<E> shape,
                         Tuple&& factory_tuple,
                         detail::index_sequence<TupleIndices...>)
{
  return detail::bulk_async_execute_with_void_result(exec, f, shape, agency::get<TupleIndices>(std::forward<Tuple>(factory_tuple))...);
}
// computes the result type of bulk_async(executor):
// the executor's future type instantiated with the collected result of invoking Function
template<class Executor, class Function, class... Args>
struct bulk_async_executor_result
{
using type = executor_future_t<
Executor, bulk_invoke_executor_result_t<Executor,Function,Args...>
>;
};
// convenience alias for bulk_async_executor_result<...>::type
template<class Executor, class Function, class... Args>
using bulk_async_executor_result_t = typename bulk_async_executor_result<Executor,Function,Args...>::type;
// entry point for bulk_async() lowered onto a raw executor:
// binds f's arguments, packages shared parameters into per-depth factories,
// and dispatches to the appropriate bulk_async_executor_impl() overload
// based on the type of f's result (normal value, scope_result, or void).
template<class Executor, class Function, class... Args>
__AGENCY_ANNOTATION
bulk_async_executor_result_t<Executor, Function, Args...>
bulk_async_executor(Executor& exec, executor_shape_t<Executor> shape, Function f, Args&&... args)
{
// the _1 is for the executor idx parameter, which is the first parameter passed to f
auto g = detail::bind_agent_local_parameters_workaround_nvbug1754712(std::integral_constant<size_t,1>(), f, detail::placeholders::_1, std::forward<Args>(args)...);
// make a tuple of the shared args
auto shared_arg_tuple = detail::forward_shared_parameters_as_tuple(std::forward<Args>(args)...);
// package up the shared parameters for the executor
const size_t execution_depth = executor_execution_depth<Executor>::value;
// create a tuple of factories to use for shared parameters for the executor
auto factory_tuple = agency::detail::make_shared_parameter_factory_tuple<execution_depth>(shared_arg_tuple);
// unpack shared parameters we receive from the executor
auto h = detail::make_unpack_shared_parameters_from_executor_and_invoke(g);
// compute the type of f's result
using result_of_f = result_of_t<Function(executor_index_t<Executor>,decay_parameter_t<Args>...)>;
// based on the type of f's result, make a factory that will create the appropriate type of container to store f's results
auto result_factory = detail::make_result_factory<result_of_f>(exec, shape);
return detail::bulk_async_executor_impl(exec, h, result_factory, shape, factory_tuple, detail::make_index_sequence<execution_depth>());
}
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/execution/executor/executor_traits/is_bulk_executor.hpp>
#include <agency/execution/executor/executor_traits/detail/member_execution_category_or.hpp>
#include <agency/execution/execution_categories.hpp>
namespace agency
{
namespace detail
{
// implementation detail: the primary template is empty (no ::type) for
// non-bulk-executors, so executor_execution_category is SFINAE-friendly
template<class BulkExecutor, bool Enable = is_bulk_executor<BulkExecutor>::value>
struct executor_execution_category_impl
{
};
// for bulk executors, use the member execution_category if present,
// otherwise default to unsequenced_execution_tag
template<class BulkExecutor>
struct executor_execution_category_impl<BulkExecutor,true>
{
using type = member_execution_category_or_t<BulkExecutor,unsequenced_execution_tag>;
};
} // end detail
// public trait: yields the execution category of a bulk executor;
// has no ::type member when BulkExecutor is not a bulk executor
template<class BulkExecutor>
struct executor_execution_category : detail::executor_execution_category_impl<BulkExecutor> {};
// convenience alias for executor_execution_category<...>::type
template<class BulkExecutor>
using executor_execution_category_t = typename executor_execution_category<BulkExecutor>::type;
} // end agency
<file_sep>#include <iostream>
#include <type_traits>
#include <vector>
// XXX use parallel_executor.hpp instead of thread_pool.hpp due to circular #inclusion problems
#include <agency/execution/executor/parallel_executor.hpp>
#include <agency/execution/executor/executor_traits.hpp>
#include <agency/execution/executor/customization_points.hpp>
// unit test for thread_pool_executor: compile-time trait checks followed by
// runtime checks of bulk_sync_execute() and bulk_then_execute()
int main()
{
using namespace agency;
// compile-time checks of the executor's traits
static_assert(is_bulk_continuation_executor<detail::thread_pool_executor>::value,
"thread_pool_executor should be a bulk continuation executor");
static_assert(is_bulk_executor<detail::thread_pool_executor>::value,
"thread_pool_executor should be a bulk executor");
static_assert(detail::is_detected_exact<parallel_execution_tag, executor_execution_category_t, detail::thread_pool_executor>::value,
"thread_pool_executor should have parallel_execution_tag execution_category");
static_assert(detail::is_detected_exact<size_t, executor_shape_t, detail::thread_pool_executor>::value,
"thread_pool_executor should have size_t shape_type");
static_assert(detail::is_detected_exact<size_t, executor_index_t, detail::thread_pool_executor>::value,
"thread_pool_executor should have size_t index_type");
static_assert(detail::is_detected_exact<std::future<int>, executor_future_t, detail::thread_pool_executor, int>::value,
"thread_pool_executor should have std::future future");
static_assert(executor_execution_depth<detail::thread_pool_executor>::value == 1,
"thread_pool_executor should have execution_depth == 1");
detail::thread_pool_executor exec;
{
// bulk_sync_execute(): each agent writes shared_arg[idx] - 1 into its result slot
size_t shape = 10;
auto result = exec.bulk_sync_execute(
[](size_t idx, std::vector<int>& results, std::vector<int>& shared_arg)
{
results[idx] = shared_arg[idx] - 1;
},
shape,
[=]{ return std::vector<int>(shape); }, // results
[=]{ return std::vector<int>(shape, 13); } // shared_arg
);
assert(std::vector<int>(10, 13 - 1) == result);
}
{
// bulk_then_execute() with non-void predecessor: each agent combines the
// predecessor value with its shared argument
std::future<int> predecessor_fut = agency::make_ready_future<int>(exec, 7);
size_t shape = 10;
auto f = exec.bulk_then_execute(
[](size_t idx, int& predecessor, std::vector<int>& results, std::vector<int>& shared_arg)
{
results[idx] = predecessor + shared_arg[idx];
},
shape,
predecessor_fut,
[=]{ return std::vector<int>(shape); }, // results
[=]{ return std::vector<int>(shape, 13); } // shared_arg
);
auto result = f.get();
assert(std::vector<int>(10, 7 + 13) == result);
}
{
// bulk_then_execute() with void predecessor: the lambda takes no predecessor parameter
std::future<void> predecessor_fut = agency::make_ready_future<void>(exec);
size_t shape = 10;
auto f = exec.bulk_then_execute(
[](size_t idx, std::vector<int>& results, std::vector<int>& shared_arg)
{
results[idx] = shared_arg[idx];
},
shape,
predecessor_fut,
[=]{ return std::vector<int>(shape); }, // results
[=]{ return std::vector<int>(shape, 13); } // shared_arg
);
auto result = f.get();
assert(std::vector<int>(10, 13) == result);
}
std::cout << "OK" << std::endl;
return 0;
}
<file_sep>#include <iostream>
#include <cassert>
#include <algorithm>
#include <vector>
#include <list>
#include <agency/container/vector.hpp>
#include <agency/execution/execution_policy.hpp>
template<class Container>
void test_range_constructor()
{
using namespace agency;
using value_type = typename Container::value_type;
size_t num_elements_to_insert = 5;
Container items(num_elements_to_insert);
std::iota(items.begin(), items.end(), 0);
vector<value_type> v(items.begin(), items.end());
assert(v.size() == num_elements_to_insert);
assert(std::equal(v.begin(), v.end(), items.begin()));
}
// verifies that agency::vector's execution-policy range constructor copies all
// elements of a Container in order, executing the copy under the given policy
template<class Container, class ExecutionPolicy>
void test_range_constructor(ExecutionPolicy policy)
{
using namespace agency;
using value_type = typename Container::value_type;
size_t num_elements_to_insert = 5;
Container items(num_elements_to_insert);
std::iota(items.begin(), items.end(), 0);
// pass the policy to the constructor; previously the policy parameter was
// ignored, making this overload an exact duplicate of the policy-free test
vector<value_type> v(policy, items.begin(), items.end());
assert(v.size() == num_elements_to_insert);
assert(std::equal(v.begin(), v.end(), items.begin()));
}
// unit test driver for agency::vector's range constructors
int main()
{
{
// test construction from std::vector
test_range_constructor<std::vector<int>>();
test_range_constructor<std::vector<int>>(agency::par);
}
{
// test construction from std::list, which has iterators which do not parallelize
test_range_constructor<std::list<int>>();
}
std::cout << "OK" << std::endl;
return 0;
}
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/detail/tuple/tuple_utility.hpp>
#include <tuple>
#include <type_traits>
#include <iostream>
namespace agency
{
namespace detail
{
template<typename Derived>
class arithmetic_tuple_facade;
// trait: true when T derives from arithmetic_tuple_facade<T> (i.e., T opted
// into the element-wise arithmetic/comparison operators below)
template<class T>
struct is_arithmetic_tuple : std::is_base_of<arithmetic_tuple_facade<T>, T> {};
// CRTP facade that augments a tuple-like Derived type with element-wise
// arithmetic operators (+, -, *, /, and their fused-assignment forms),
// scalar multiply/divide, lexicographic comparisons, and stream output.
// Derived must support std::tuple_size and element access via __tu::__get.
template<typename Derived>
class arithmetic_tuple_facade
{
private:
// element-wise binary functors applied via __tu::tuple_for_each
struct assign
{
template<typename T1, typename T2>
__AGENCY_ANNOTATION
T1& operator()(T1 &lhs, const T2& rhs) const
{
return lhs = rhs;
}
};
struct plus_assign
{
template<typename T1, typename T2>
__AGENCY_ANNOTATION
T1& operator()(T1 &lhs, const T2& rhs) const
{
return lhs += rhs;
}
};
struct minus_assign
{
template<typename T1, typename T2>
__AGENCY_ANNOTATION
T1& operator()(T1 &lhs, const T2& rhs) const
{
return lhs -= rhs;
}
};
struct multiplies_assign
{
template<typename T1, typename T2>
__AGENCY_ANNOTATION
T1& operator()(T1 &lhs, const T2& rhs) const
{
return lhs *= rhs;
}
};
// unary functors that combine each element with a captured scalar constant
template<typename T>
struct assign_constant
{
T c;
template<typename U>
__AGENCY_ANNOTATION
U& operator()(U& x) const
{
return x = c;
}
};
template<typename T>
struct multiplies_assign_constant
{
T c;
template<typename U>
__AGENCY_ANNOTATION
U& operator()(U& x) const
{
return x *= c;
}
};
struct divides_assign
{
template<typename T1, typename T2>
__AGENCY_ANNOTATION
T1& operator()(T1 &lhs, const T2& rhs) const
{
return lhs /= rhs;
}
};
template<typename T>
struct divides_assign_constant
{
T c;
template<typename U>
__AGENCY_ANNOTATION
U& operator()(U& x) const
{
return x /= c;
}
};
struct make_derived
{
template<class... Args>
__AGENCY_ANNOTATION
Derived operator()(Args&&... args) const
{
return Derived{std::forward<Args>(args)...};
}
};
// CRTP downcasts to the concrete tuple type
__AGENCY_ANNOTATION Derived& derived()
{
return static_cast<Derived&>(*this);
}
__AGENCY_ANNOTATION const Derived& derived() const
{
return static_cast<const Derived&>(*this);
}
protected:
// set every element to the scalar val
template<class Arithmetic>
__AGENCY_ANNOTATION
typename std::enable_if<
std::is_arithmetic<Arithmetic>::value
>::type
fill(const Arithmetic& val)
{
return __tu::tuple_for_each(assign_constant<Arithmetic>{val}, derived());
}
// element-wise copy from a tuple of matching size
template<class ArithmeticTuple,
class = typename std::enable_if<
std::tuple_size<Derived>::value == std::tuple_size<ArithmeticTuple>::value
>::type>
__AGENCY_ANNOTATION
void copy(const ArithmeticTuple& src)
{
return __tu::tuple_for_each(assign{}, derived(), src);
}
public:
// fused op-assignment
template<class ArithmeticTuple,
class = typename std::enable_if<
std::tuple_size<Derived>::value == std::tuple_size<ArithmeticTuple>::value
>::type>
__AGENCY_ANNOTATION Derived& operator*=(const ArithmeticTuple& rhs)
{
__tu::tuple_for_each(multiplies_assign{}, derived(), rhs);
return derived();
}
template<class ArithmeticTuple,
class = typename std::enable_if<
std::tuple_size<Derived>::value == std::tuple_size<ArithmeticTuple>::value
>::type>
__AGENCY_ANNOTATION Derived& operator/=(const ArithmeticTuple& rhs)
{
__tu::tuple_for_each(divides_assign{}, derived(), rhs);
return derived();
}
template<class ArithmeticTuple,
class = typename std::enable_if<
std::tuple_size<Derived>::value == std::tuple_size<ArithmeticTuple>::value
>::type>
__AGENCY_ANNOTATION Derived& operator+=(const ArithmeticTuple& rhs)
{
__tu::tuple_for_each(plus_assign{}, derived(), rhs);
return derived();
}
template<class ArithmeticTuple,
class = typename std::enable_if<
std::tuple_size<Derived>::value == std::tuple_size<ArithmeticTuple>::value
>::type>
__AGENCY_ANNOTATION Derived& operator-=(const ArithmeticTuple& rhs)
{
__tu::tuple_for_each(minus_assign{}, derived(), rhs);
return derived();
}
// multiply by scalar
template<class Arithmetic>
__AGENCY_ANNOTATION
typename std::enable_if<
std::is_arithmetic<Arithmetic>::value,
Derived&
>::type
operator*=(const Arithmetic& rhs)
{
__tu::tuple_for_each(multiplies_assign_constant<Arithmetic>(rhs), derived());
return derived();
}
// divide by scalar
template<class Arithmetic>
__AGENCY_ANNOTATION
typename std::enable_if<
std::is_arithmetic<Arithmetic>::value,
Derived&
>::type
operator/=(const Arithmetic& rhs)
{
__tu::tuple_for_each(divides_assign_constant<Arithmetic>(rhs), derived())&#x3B;
return derived();
}
// ops: each binary operator copies the derived tuple, then applies the
// corresponding fused-assignment operator to the copy
template<class ArithmeticTuple,
class = typename std::enable_if<
std::tuple_size<Derived>::value == std::tuple_size<ArithmeticTuple>::value
>::type>
__AGENCY_ANNOTATION
Derived operator+(const ArithmeticTuple& rhs) const
{
Derived result = derived();
static_cast<arithmetic_tuple_facade&>(result) += rhs;
return result;
}
template<class ArithmeticTuple,
class = typename std::enable_if<
std::tuple_size<Derived>::value == std::tuple_size<ArithmeticTuple>::value
>::type>
__AGENCY_ANNOTATION
Derived operator-(const ArithmeticTuple& rhs) const
{
Derived result = derived();
static_cast<arithmetic_tuple_facade&>(result) -= rhs;
return result;
}
template<class ArithmeticTuple,
class = typename std::enable_if<
std::tuple_size<Derived>::value == std::tuple_size<ArithmeticTuple>::value
>::type>
__AGENCY_ANNOTATION
Derived operator*(const ArithmeticTuple& rhs) const
{
Derived result = derived();
static_cast<arithmetic_tuple_facade&>(result) *= rhs;
return result;
}
template<class ArithmeticTuple,
class = typename std::enable_if<
std::tuple_size<Derived>::value == std::tuple_size<ArithmeticTuple>::value
>::type>
__AGENCY_ANNOTATION
Derived operator/(const ArithmeticTuple& rhs) const
{
Derived result = derived();
static_cast<arithmetic_tuple_facade&>(result) /= rhs;
return result;
}
// equality (element-wise, enabled only for other arithmetic tuples)
template<class ArithmeticTuple,
class = typename std::enable_if<
is_arithmetic_tuple<ArithmeticTuple>::value
>::type>
__AGENCY_ANNOTATION
bool operator==(const ArithmeticTuple& rhs) const
{
return __tu::tuple_equal(derived(), rhs);
}
template<class ArithmeticTuple,
class = typename std::enable_if<
is_arithmetic_tuple<ArithmeticTuple>::value
>::type>
__AGENCY_ANNOTATION
bool operator!=(const ArithmeticTuple& rhs) const
{
return !operator==(rhs);
}
// relational ops (lexicographic; >, <=, >= are derived from <)
template<class ArithmeticTuple,
class = typename std::enable_if<
is_arithmetic_tuple<ArithmeticTuple>::value
>::type>
__AGENCY_ANNOTATION
bool operator<(const ArithmeticTuple& rhs) const
{
return __tu::tuple_lexicographical_compare(derived(), rhs);
}
template<class ArithmeticTuple,
class = typename std::enable_if<
is_arithmetic_tuple<ArithmeticTuple>::value
>::type>
__AGENCY_ANNOTATION
bool operator>(const ArithmeticTuple& rhs) const
{
return __tu::tuple_lexicographical_compare(rhs, derived());
}
template<class ArithmeticTuple,
class = typename std::enable_if<
is_arithmetic_tuple<ArithmeticTuple>::value
>::type>
__AGENCY_ANNOTATION
bool operator<=(const ArithmeticTuple& rhs) const
{
return !operator>(rhs);
}
template<class ArithmeticTuple,
class = typename std::enable_if<
is_arithmetic_tuple<ArithmeticTuple>::value
>::type>
__AGENCY_ANNOTATION
bool operator>=(const ArithmeticTuple& rhs) const
{
return !operator<(rhs);
}
private:
// define our own print_tuple() function to use instead of __tu::print_tuple()
// to avoid nvcc's warnings
// base case: past the last element, nothing to print
template<size_t BeginIndex, __AGENCY_REQUIRES(BeginIndex == std::tuple_size<Derived>::value)>
static void print_tuple(std::ostream&, const Derived&)
{
}
// recursive case: print element BeginIndex, then the rest
template<size_t BeginIndex, __AGENCY_REQUIRES(BeginIndex < std::tuple_size<Derived>::value)>
static void print_tuple(std::ostream& os, const Derived& t)
{
os << __tu::__get<BeginIndex>(t);
// insert a delimiter after elements except for the last
if(BeginIndex < std::tuple_size<Derived>::value - 1)
{
os << ", ";
}
arithmetic_tuple_facade::print_tuple<BeginIndex+1>(os, t);
}
public:
// prints the tuple as "{e0, e1, ...}"
friend std::ostream& operator<<(std::ostream& os, const arithmetic_tuple_facade& t)
{
os << "{";
arithmetic_tuple_facade::print_tuple<0>(os, t.derived());
os << "}";
return os;
}
};
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/invoke.hpp>
#include <agency/detail/unit.hpp>
namespace agency
{
namespace detail
{
// this functor is used by bulk_*_execute_with_void_result()
// this definition is used when there is a non-void predecessor parameter
// it drops the executor-supplied unit result parameter before invoking f
template<class Function, class Predecessor = void>
struct ignore_unit_result_parameter_and_invoke
{
mutable Function f;
__agency_exec_check_disable__
__AGENCY_ANNOTATION
ignore_unit_result_parameter_and_invoke() = default;
__agency_exec_check_disable__
__AGENCY_ANNOTATION
ignore_unit_result_parameter_and_invoke(const ignore_unit_result_parameter_and_invoke&) = default;
__agency_exec_check_disable__
__AGENCY_ANNOTATION
~ignore_unit_result_parameter_and_invoke() = default;
// invoke f with everything except the unit result placeholder
template<class Index, class... SharedParameters>
__AGENCY_ANNOTATION
void operator()(const Index& idx, Predecessor& predecessor, unit&, SharedParameters&... shared_parameters) const
{
agency::detail::invoke(f, idx, predecessor, shared_parameters...);
}
};
// this is the specialization used when there is no predecessor parameter
template<class Function>
struct ignore_unit_result_parameter_and_invoke<Function,void>
{
mutable Function f;
__agency_exec_check_disable__
__AGENCY_ANNOTATION
ignore_unit_result_parameter_and_invoke() = default;
__agency_exec_check_disable__
__AGENCY_ANNOTATION
ignore_unit_result_parameter_and_invoke(const ignore_unit_result_parameter_and_invoke&) = default;
__agency_exec_check_disable__
__AGENCY_ANNOTATION
~ignore_unit_result_parameter_and_invoke() = default;
// invoke f with everything except the unit result placeholder
template<class Index, class... SharedParameters>
__AGENCY_ANNOTATION
void operator()(const Index& idx, unit&, SharedParameters&... shared_parameters) const
{
agency::detail::invoke(f, idx, shared_parameters...);
}
};
// this functor is used by bulk_*_execute_with_collected_result()
// this definition is used when there is a non-void predecessor parameter
// it stores f's return value into the results collection at the agent's index
template<class Function, class Predecessor = void>
struct invoke_and_collect_result
{
mutable Function f;
__agency_exec_check_disable__
template<class Index, class Collection, class... SharedParameters>
__AGENCY_ANNOTATION
void operator()(const Index& idx, Predecessor& predecessor, Collection& results, SharedParameters&... shared_parameters) const
{
results[idx] = agency::detail::invoke(f, idx, predecessor, shared_parameters...);
}
};
// this is the specialization used when there is no predecessor parameter
template<class Function>
struct invoke_and_collect_result<Function,void>
{
mutable Function f;
__agency_exec_check_disable__
template<class Index, class Collection, class... SharedParameters>
__AGENCY_ANNOTATION
void operator()(const Index& idx, Collection& results, SharedParameters&... shared_parameters) const
{
results[idx] = agency::detail::invoke(f, idx, shared_parameters...);
}
};
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/future.hpp>
#include <agency/execution/execution_categories.hpp>
#include <agency/detail/invoke.hpp>
#include <agency/detail/type_traits.hpp>
#include <thread>
#include <vector>
#include <memory>
#include <algorithm>
#include <utility>
namespace agency
{
// executor providing concurrent execution: every agent runs on its own
// std::thread, launched via a recursive binary fan-out in async_execute()
class concurrent_executor
{
public:
using execution_category = concurrent_execution_tag;
// returns the number of threads the hardware can run concurrently,
// falling back to 1 when that information is unavailable
size_t unit_shape() const
{
constexpr size_t default_result = 1;
size_t hw_concurrency = std::thread::hardware_concurrency();
// hardware_concurrency() is allowed to return 0, so guard against a 0 result
return hw_concurrency ? hw_concurrency : default_result;
}
// launches n agents after predecessor completes; each agent receives its index,
// the predecessor's value (when non-void), the shared result, and the shared parameter
template<class Function, class Future, class ResultFactory, class SharedFactory>
std::future<
detail::result_of_t<ResultFactory()>
>
bulk_then_execute(Function f, size_t n, Future& predecessor, ResultFactory result_factory, SharedFactory shared_factory)
{
return bulk_then_execute_impl(f, n, predecessor, result_factory, shared_factory);
}
private:
// overload selected when the predecessor future carries a (non-void) value
template<class Function, class Future, class ResultFactory, class SharedFactory>
std::future<agency::detail::result_of_t<ResultFactory()>>
bulk_then_execute_impl(Function f, size_t n, Future& predecessor, ResultFactory result_factory, SharedFactory shared_factory,
typename std::enable_if<
!std::is_void<
typename agency::future_traits<Future>::value_type
>::value
>::type* = 0)
{
if(n > 0)
{
using predecessor_type = typename agency::future_traits<Future>::value_type;
return agency::detail::monadic_then(predecessor, std::launch::async, [=](predecessor_type& predecessor) mutable
{
// put all the shared parameters on the first thread's stack
auto result = result_factory();
auto shared_parameter = shared_factory();
// create a lambda to handle parameter passing
auto g = [&,f](size_t idx)
{
agency::detail::invoke(f, idx, predecessor, result, shared_parameter);
};
// fan out: recurse on [0,mid) and [mid+1,n) in new threads, run mid here
size_t mid = n / 2;
std::future<void> left = agency::detail::make_ready_future();
if(0 < mid)
{
left = this->async_execute(g, 0, mid);
}
std::future<void> right = agency::detail::make_ready_future();
if(mid + 1 < n)
{
right = this->async_execute(g, mid + 1, n);
}
g(mid);
left.wait();
right.wait();
return std::move(result);
});
}
// zero agents: nothing to launch, just return the result container
return agency::detail::make_ready_future(result_factory());
}
// overload selected when the predecessor future is void; mirrors the overload
// above, minus the predecessor parameter
template<class Function, class Future, class ResultFactory, class SharedFactory>
std::future<agency::detail::result_of_t<ResultFactory()>>
bulk_then_execute_impl(Function f, size_t n, Future& predecessor, ResultFactory result_factory, SharedFactory shared_factory,
typename std::enable_if<
std::is_void<
typename agency::future_traits<Future>::value_type
>::value
>::type* = 0)
{
if(n > 0)
{
return agency::detail::monadic_then(predecessor, std::launch::async, [=]() mutable
{
// put all the shared parameters on the first thread's stack
auto result = result_factory();
auto shared_parameter = shared_factory();
// create a lambda to handle parameter passing
auto g = [&,f](size_t idx)
{
agency::detail::invoke(f, idx, result, shared_parameter);
};
// fan out: recurse on [0,mid) and [mid+1,n) in new threads, run mid here
size_t mid = n / 2;
std::future<void> left = agency::detail::make_ready_future();
if(0 < mid)
{
left = this->async_execute(g, 0, mid);
}
std::future<void> right = agency::detail::make_ready_future();
if(mid + 1 < n)
{
right = this->async_execute(g, mid + 1, n);
}
g(mid);
left.wait();
right.wait();
return std::move(result);
});
}
// zero agents: nothing to launch, just return the result container
return agency::detail::make_ready_future(result_factory());
}
// first must be less than last
// recursively invokes f(idx) for every idx in [first, last): the midpoint runs
// on this thread, the two halves are launched asynchronously
template<class Function>
std::future<void> async_execute(Function f, size_t first, size_t last)
{
return std::async(std::launch::async, [=]() mutable
{
size_t mid = (last + first) / 2;
std::future<void> left = detail::make_ready_future();
if(first < mid)
{
left = this->async_execute(f, first, mid);
}
std::future<void> right = detail::make_ready_future();
if(mid + 1 < last)
{
right = this->async_execute(f, mid + 1, last);
}
agency::detail::invoke(f,mid);
left.wait();
right.wait();
});
}
};
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/detail/type_traits.hpp>
#include <agency/execution/executor/executor_traits.hpp>
#include <agency/future/detail/future_cast.hpp>
#include <agency/tuple.hpp>
#include <future>
namespace agency
{
// this case handles executors which have .bulk_then_execute()
// and simply forwards to the executor's native member function
__agency_exec_check_disable__
template<class E, class Function, class Future, class ResultFactory, class... Factories,
__AGENCY_REQUIRES(detail::BulkContinuationExecutor<E>()),
__AGENCY_REQUIRES(executor_execution_depth<E>::value == sizeof...(Factories))
>
__AGENCY_ANNOTATION
executor_future_t<
E,
detail::result_of_t<ResultFactory()>
>
bulk_then_execute(E& exec, Function f, executor_shape_t<E> shape, Future& predecessor, ResultFactory result_factory, Factories... shared_factories)
{
return exec.bulk_then_execute(f, shape, predecessor, result_factory, shared_factories...);
}
namespace detail
{
// adapter which wraps a user function and a shared predecessor future so the
// pair can be submitted via bulk_async_execute(); this primary template handles
// a non-void predecessor and passes its value through to the user function
template<class Function, class SharedFuture,
bool Enable = std::is_void<detail::future_value_t<SharedFuture>>::value
>
struct bulk_then_execute_functor
{
mutable Function f_;
mutable SharedFuture fut_;
using predecessor_type = typename future_traits<SharedFuture>::value_type;
__agency_exec_check_disable__
__AGENCY_ANNOTATION
~bulk_then_execute_functor() = default;
__agency_exec_check_disable__
__AGENCY_ANNOTATION
bulk_then_execute_functor(Function f, const SharedFuture& fut)
: f_(f), fut_(fut)
{}
__agency_exec_check_disable__
__AGENCY_ANNOTATION
bulk_then_execute_functor(const bulk_then_execute_functor&) = default;
__agency_exec_check_disable__
template<class Index, class... Args>
__AGENCY_ANNOTATION
auto operator()(const Index &idx, Args&... args) const ->
decltype(f_(idx, const_cast<predecessor_type&>(fut_.get()),args...))
{
// const_cast because shared_future::get() returns a const reference, but
// the user function expects a mutable reference to the predecessor
predecessor_type& predecessor = const_cast<predecessor_type&>(fut_.get());
return f_(idx, predecessor, args...);
}
};
// specialization for a void predecessor: there is no value to pass through,
// so operator() just waits on the future before invoking the user function
template<class Function, class SharedFuture>
struct bulk_then_execute_functor<Function,SharedFuture,true>
{
mutable Function f_;
mutable SharedFuture fut_;
__agency_exec_check_disable__
__AGENCY_ANNOTATION
~bulk_then_execute_functor() = default;
__agency_exec_check_disable__
__AGENCY_ANNOTATION
bulk_then_execute_functor(Function f, const SharedFuture& fut)
: f_(f), fut_(fut)
{}
__agency_exec_check_disable__
__AGENCY_ANNOTATION
bulk_then_execute_functor(const bulk_then_execute_functor&) = default;
__agency_exec_check_disable__
template<class Index, class... Args>
__AGENCY_ANNOTATION
auto operator()(const Index &idx, Args&... args) const ->
decltype(f_(idx, args...))
{
// block until the predecessor has completed, then invoke
fut_.wait();
return f_(idx, args...);
}
};
// factory which deduces the template parameters of bulk_then_execute_functor
template<class Function, class SharedFuture>
__AGENCY_ANNOTATION
bulk_then_execute_functor<Function,SharedFuture> make_bulk_then_execute_functor(Function f, const SharedFuture& shared_future)
{
return bulk_then_execute_functor<Function,SharedFuture>(f, shared_future);
}
} // end detail
// this case handles executors which have .bulk_async_execute() and may or may not have .bulk_sync_execute()
// the predecessor future is shared and wrapped into a functor which waits for it
__agency_exec_check_disable__
template<class E, class Function, class Future, class ResultFactory, class... Factories,
__AGENCY_REQUIRES(!detail::BulkContinuationExecutor<E>() && detail::BulkAsynchronousExecutor<E>()),
__AGENCY_REQUIRES(executor_execution_depth<E>::value == sizeof...(Factories))
>
__AGENCY_ANNOTATION
executor_future_t<
E,
detail::result_of_t<ResultFactory()>
>
bulk_then_execute(E& exec, Function f, executor_shape_t<E> shape, Future& predecessor, ResultFactory result_factory, Factories... shared_factories)
{
// XXX we may wish to allow the executor to participate in this sharing operation
auto shared_predecessor_future = future_traits<Future>::share(predecessor);
auto functor = detail::make_bulk_then_execute_functor(f, shared_predecessor_future);
return exec.bulk_async_execute(functor, shape, result_factory, shared_factories...);
}
namespace detail
{
// this functor is used by the implementation of bulk_then_execute() below which calls .then() with a nested bulk_sync_execute() inside
// this definition is for the general case, when the predecessor Future type is non-void
template<class Executor, class Function, class Predecessor, class ResultFactory, class... SharedFactories>
struct then_with_nested_bulk_sync_execute_functor
{
mutable Executor exec;
mutable Function f;
executor_shape_t<Executor> shape;
mutable ResultFactory result_factory;
mutable tuple<SharedFactories...> shared_factories;
// this functor is passed to bulk_sync_execute() below
// it has a reference to the predecessor future to use as a parameter to f
struct functor_for_bulk_sync_execute
{
mutable Function f;
Predecessor& predecessor;
template<class Index, class Result, class... SharedArgs>
__AGENCY_ANNOTATION
void operator()(const Index& idx, Result& result, SharedArgs&... shared_args) const
{
agency::detail::invoke(f, idx, predecessor, result, shared_args...);
}
};
// expands the tuple of shared factories into bulk_sync_execute() arguments
__agency_exec_check_disable__
template<size_t... Indices>
__AGENCY_ANNOTATION
result_of_t<ResultFactory()> impl(detail::index_sequence<Indices...>, Predecessor& predecessor) const
{
functor_for_bulk_sync_execute functor{f, predecessor};
return exec.bulk_sync_execute(functor, shape, result_factory, agency::get<Indices>(shared_factories)...);
}
// invoked by .then() with the predecessor's value
__AGENCY_ANNOTATION
result_of_t<ResultFactory()> operator()(Predecessor& predecessor) const
{
return impl(detail::make_index_sequence<sizeof...(SharedFactories)>(), predecessor);
}
};
// this specialization is for the case when the predecessor Future type is void
template<class Executor, class Function, class ResultFactory, class... SharedFactories>
struct then_with_nested_bulk_sync_execute_functor<Executor,Function,void,ResultFactory,SharedFactories...>
{
mutable Executor exec;
mutable Function f;
executor_shape_t<Executor> shape;
mutable ResultFactory result_factory;
mutable tuple<SharedFactories...> shared_factories;
// expands the tuple of shared factories into bulk_sync_execute() arguments;
// f is passed directly since there is no predecessor value to forward
__agency_exec_check_disable__
template<size_t... Indices>
__AGENCY_ANNOTATION
result_of_t<ResultFactory()> impl(detail::index_sequence<Indices...>) const
{
return exec.bulk_sync_execute(f, shape, result_factory, agency::get<Indices>(shared_factories)...);
}
// the predecessor future is void, so operator() receives no parameter
__AGENCY_ANNOTATION
result_of_t<ResultFactory()> operator()() const
{
return impl(detail::make_index_sequence<sizeof...(SharedFactories)>());
}
};
} // end detail
// this case handles executors which only have .bulk_sync_execute():
// chain a continuation onto the predecessor which runs the whole bulk
// synchronous operation, then cast the resulting future to the executor's type
__agency_exec_check_disable__
template<class E, class Function, class Future, class ResultFactory, class... Factories,
__AGENCY_REQUIRES(!detail::BulkContinuationExecutor<E>() && !detail::BulkAsynchronousExecutor<E>()),
__AGENCY_REQUIRES(executor_execution_depth<E>::value == sizeof...(Factories))
>
__AGENCY_ANNOTATION
executor_future_t<
E,
detail::result_of_t<ResultFactory()>
>
bulk_then_execute(E& exec, Function f, executor_shape_t<E> shape, Future& predecessor, ResultFactory result_factory, Factories... shared_factories)
{
using predecessor_type = detail::future_value_t<Future>;
detail::then_with_nested_bulk_sync_execute_functor<E,Function,predecessor_type,ResultFactory,Factories...> functor{exec,f,shape,result_factory,agency::make_tuple(shared_factories...)};
auto intermediate_fut = future_traits<Future>::then(predecessor, std::move(functor));
using result_type = detail::result_of_t<ResultFactory()>;
using result_future_type = executor_future_t<E,result_type>;
// XXX we need to call future_cast<result_type>(exec, intermediate_fut) here
// however, #including future_cast.hpp causes circular inclusion problems.
return agency::detail::future_cast<result_future_type>(intermediate_fut);
}
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/detail/algorithm/copy_n.hpp>
#include <agency/execution/execution_policy.hpp>
#include <agency/detail/iterator/iterator_traits.hpp>
namespace agency
{
namespace detail
{
// parallel path: selected when the policy is not sequenced and both iterators
// are random access; delegates to the parallel copy_n() implementation
template<class ExecutionPolicy, class RandomAccessIterator1, class RandomAccessIterator2,
__AGENCY_REQUIRES(
!policy_is_sequenced<decay_t<ExecutionPolicy>>::value and
iterators_are_random_access<RandomAccessIterator1,RandomAccessIterator2>::value
)>
__AGENCY_ANNOTATION
RandomAccessIterator2 copy(ExecutionPolicy&& policy, RandomAccessIterator1 first, RandomAccessIterator1 last, RandomAccessIterator2 result)
{
// copy_n returns a (last input, last output) iterator pair; return the output end
auto iter_pair = detail::copy_n(std::forward<ExecutionPolicy>(policy), first, last - first, result);
return agency::get<1>(iter_pair);
}
// sequential fallback: selected for sequenced policies, or when either
// iterator is not random access; the policy argument is intentionally unused
template<class ExecutionPolicy, class InputIterator, class OutputIterator,
__AGENCY_REQUIRES(
policy_is_sequenced<decay_t<ExecutionPolicy>>::value or
!iterators_are_random_access<InputIterator,OutputIterator>::value
)>
__AGENCY_ANNOTATION
OutputIterator copy(ExecutionPolicy&&, InputIterator first, InputIterator last, OutputIterator result)
{
// XXX we might wish to bulk_invoke a single agent and execute this loop inside
while(first != last)
{
*result = *first;
++first;
++result;
}
return result;
}
// policy-free convenience overload: always copies sequentially
template<class InputIterator, class OutputIterator>
__AGENCY_ANNOTATION
OutputIterator copy(InputIterator first, InputIterator last, OutputIterator result)
{
// pass this instead of agency::seq to work around the prohibition on
// taking the address of a global constexpr object (i.e., agency::seq) from a CUDA __device__ function
agency::sequenced_execution_policy seq;
return detail::copy(seq, first, last, result);
}
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/execution/executor/executor_traits/is_bulk_executor.hpp>
#include <agency/execution/executor/detail/utility/executor_bulk_result.hpp>
#include <agency/detail/type_traits.hpp>
namespace agency
{
namespace detail
{
// trait: the container type an executor uses to collect per-agent results of
// type T, or void when T is void; has no ::type for non-bulk-executors so it
// remains SFINAE-friendly
template<class Executor, class T, bool Enable = is_bulk_executor<Executor>::value>
struct executor_bulk_result_or_void {};
template<class Executor, class T>
struct executor_bulk_result_or_void<Executor,T,true>
{
// lazy_conditional avoids instantiating executor_bulk_result<Executor,void>
using type = typename detail::lazy_conditional<
std::is_void<T>::value,
detail::identity<void>,
executor_bulk_result<Executor,T>
>::type;
};
// convenience alias for executor_bulk_result_or_void<...>::type
template<class Executor, class T>
using executor_bulk_result_or_void_t = typename executor_bulk_result_or_void<Executor,T>::type;
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/experimental/ndarray.hpp>
#include <agency/detail/shape_tuple.hpp>
#include <agency/detail/index_tuple.hpp>
#include <agency/detail/invoke.hpp>
#include <agency/detail/type_traits.hpp>
#include <agency/memory/detail/unique_ptr.hpp>
#include <agency/execution/executor/detail/this_thread_parallel_executor.hpp>
#include <agency/execution/executor/detail/utility.hpp>
#include <agency/execution/executor/executor_traits.hpp>
#include <agency/execution/executor/executor_traits/detail/member_barrier_type_or.hpp>
#include <agency/execution/executor/customization_points.hpp>
#include <agency/detail/scoped_in_place_type.hpp>
#include <agency/tuple.hpp>
namespace agency
{
// executor_array: a two-level (scoped) executor built from one outer executor
// and an array of inner executors. A bulk operation's outer dimension is run
// on the outer executor; each outer task selects an inner executor (round
// robin over the array) and runs the inner dimension of the operation there.
template<class InnerExecutor, class OuterExecutor = this_thread::parallel_executor>
class executor_array
{
public:
using outer_executor_type = OuterExecutor;
using inner_executor_type = InnerExecutor;
private:
using outer_execution_category = executor_execution_category_t<outer_executor_type>;
using inner_execution_category = executor_execution_category_t<inner_executor_type>;
// execution depth of the inner executor; bulk_then_execute() requires one
// inner factory per inner execution level
constexpr static size_t inner_depth = executor_execution_depth<inner_executor_type>::value;
public:
// the composed executor's category/shape/index types are the scoped
// combination of the outer and inner executors' corresponding types
using execution_category = scoped_execution_tag<outer_execution_category,inner_execution_category>;
using outer_shape_type = executor_shape_t<outer_executor_type>;
using inner_shape_type = executor_shape_t<inner_executor_type>;
using outer_index_type = executor_index_t<outer_executor_type>;
using inner_index_type = executor_index_t<inner_executor_type>;
using shape_type = detail::scoped_shape_t<outer_execution_category,inner_execution_category,outer_shape_type,inner_shape_type>;
using index_type = detail::scoped_index_t<outer_execution_category,inner_execution_category,outer_index_type,inner_index_type>;
using barrier_type = detail::scoped_in_place_type_t_cat_t<
detail::make_scoped_in_place_type_t<detail::member_barrier_type_or_t<outer_executor_type,void>>,
detail::make_scoped_in_place_type_t<detail::member_barrier_type_or_t<inner_executor_type,void>>
>;
// combines an outer and an inner shape into this executor's scoped shape
__AGENCY_ANNOTATION
static shape_type make_shape(const outer_shape_type& outer_shape, const inner_shape_type& inner_shape)
{
return detail::make_scoped_shape<outer_execution_category,inner_execution_category>(outer_shape, inner_shape);
}
__agency_exec_check_disable__
__AGENCY_ANNOTATION
executor_array() = default;
__agency_exec_check_disable__
__AGENCY_ANNOTATION
executor_array(const executor_array&) = default;
// constructs an array of n copies of exec
__agency_exec_check_disable__
__AGENCY_ANNOTATION
executor_array(size_t n, const inner_executor_type& exec = inner_executor_type())
: inner_executors_(n, exec)
{}
// constructs the array from a range of inner executors
template<class Iterator>
executor_array(Iterator executors_begin, Iterator executors_end)
: inner_executors_(executors_begin, executors_end)
{}
// the future and allocator types are inherited from the outer executor
template<class T>
using future = executor_future_t<outer_executor_type,T>;
template<class T>
using allocator = executor_allocator_t<outer_executor_type,T>;
// Continuation used by eager_bulk_then_execute(): waits for all inner
// futures to complete, then moves the collected result out of its holder.
// XXX this functor is public to allow nvcc to instantiate kernels with it
template<class Futures, class UniquePtr1, class UniquePtr2>
struct wait_for_futures_and_move_result
{
mutable Futures futures_;
mutable UniquePtr1 result_ptr_;
// kept alive only so the shared argument outlives the inner tasks
mutable UniquePtr2 shared_arg_ptr_;
__AGENCY_ANNOTATION
typename std::pointer_traits<UniquePtr1>::element_type
operator()() const
{
for(auto& f : futures_)
{
f.wait();
}
return std::move(*result_ptr_);
}
};
private:
// factory for wait_for_futures_and_move_result, deducing its template arguments
template<class Futures, class UniquePtr1, class UniquePtr2>
__AGENCY_ANNOTATION
wait_for_futures_and_move_result<typename std::decay<Futures>::type,UniquePtr1,UniquePtr2>
make_wait_for_futures_and_move_result(Futures&& futures, UniquePtr1&& result_ptr, UniquePtr2&& shared_arg_ptr)
{
return wait_for_futures_and_move_result<typename std::decay<Futures>::type,UniquePtr1,UniquePtr2>{std::move(futures),std::move(result_ptr),std::move(shared_arg_ptr)};
}
__AGENCY_ANNOTATION
static outer_shape_type outer_shape(const shape_type& shape)
{
// the outer portion is always the head of the tuple
return __tu::tuple_head(shape);
}
__AGENCY_ANNOTATION
static inner_shape_type inner_shape(const shape_type& shape)
{
// the inner portion of the shape is the tail of the tuple
return detail::make_from_tail<inner_shape_type>(shape);
}
// combines an outer and an inner index into this executor's scoped index
__AGENCY_ANNOTATION
static index_type make_index(const outer_index_type& outer_idx, const inner_index_type& inner_idx)
{
return detail::make_scoped_index<outer_execution_category,inner_execution_category>(outer_idx, inner_idx);
}
// maps an outer index to a position in the inner executor array
__AGENCY_ANNOTATION
size_t select_inner_executor(const outer_index_type& idx, const outer_shape_type& shape) const
{
size_t rank = detail::index_cast<size_t>(idx, shape, inner_executors_.size());
// round robin through inner executors
return rank % inner_executors_.size();
}
// lazy implementation of then_execute()
// it is universally applicable, but it might not be as efficient
// as calling execute() eagerly on the outer_executor
struct lazy_strategy {};
// eager implementation of then_execute()
// not universally applicable, but can be more efficient
// because work gets issued immediately to the outer_executor via execute()
struct eager_strategy {};
// Outer-level functor for the lazy strategy: for each outer index it runs
// the inner portion of the bulk operation synchronously on the selected
// inner executor.
// XXX make this functor public to accomodate nvcc's requirement
// on types used to instantiate __global__ function templates
template<class Function, class... InnerFactories>
struct lazy_bulk_then_execute_functor
{
mutable executor_array exec;
outer_shape_type outer_shape;
inner_shape_type inner_shape;
mutable Function f;
tuple<InnerFactories...> inner_factories;
// inner-level functor: combines the outer and inner indices and invokes
// the user function with the outer args followed by the inner shared args
template<class... OuterArgs>
struct inner_functor
{
mutable Function f;
outer_index_type outer_idx;
tuple<OuterArgs&...> outer_args;
template<size_t... Indices, class... InnerSharedArgs>
__AGENCY_ANNOTATION
void impl(detail::index_sequence<Indices...>, const inner_index_type& inner_idx, InnerSharedArgs&... inner_args) const
{
index_type idx = make_index(outer_idx, inner_idx);
f(idx, agency::get<Indices>(outer_args)..., inner_args...);
}
template<class... InnerSharedArgs>
__AGENCY_ANNOTATION
void operator()(const inner_index_type& inner_idx, InnerSharedArgs&... inner_shared_args) const
{
impl(detail::make_index_sequence<sizeof...(OuterArgs)>(), inner_idx, inner_shared_args...);
}
};
template<size_t... Indices, class... OuterArgs>
__AGENCY_ANNOTATION
void impl(detail::index_sequence<Indices...>, const outer_index_type& outer_idx, OuterArgs&... outer_args) const
{
auto inner_executor_idx = exec.select_inner_executor(outer_idx, outer_shape);
inner_executor_type& inner_exec = exec.inner_executor(inner_executor_idx);
detail::bulk_synchronous_executor_adaptor<inner_executor_type> adapted_exec(inner_exec);
// XXX avoid lambdas to workaround nvcc limitations
//detail::bulk_sync_execute_with_void_result(adapted_exec, [=,&predecessor,&result,&outer_shared_arg](const inner_index_type& inner_idx, detail::result_of_t<InnerFactories()>&... inner_shared_args)
//{
//  index_type idx = make_index(outer_idx, inner_idx);
//  f(idx, predecessor, result, outer_shared_arg, inner_shared_args...);
//},
//inner_shape,
//agency::get<Indices>(inner_factories)...);
inner_functor<OuterArgs...> execute_me{f, outer_idx, agency::forward_as_tuple(outer_args...)};
detail::bulk_sync_execute_with_void_result(adapted_exec, execute_me, inner_shape, agency::get<Indices>(inner_factories)...);
}
template<class... OuterArgs>
__AGENCY_ANNOTATION
void operator()(const outer_index_type& outer_idx, OuterArgs&... outer_args) const
{
impl(detail::make_index_sequence<sizeof...(InnerFactories)>(), outer_idx, outer_args...);
}
};
private:
template<class Function, class Future, class ResultFactory, class OuterFactory, class... InnerFactories>
__AGENCY_ANNOTATION
future<detail::result_of_t<ResultFactory()>>
lazy_bulk_then_execute(Function f, shape_type shape, Future& predecessor, ResultFactory result_factory, OuterFactory outer_factory, InnerFactories... inner_factories)
{
// this implementation of bulk_then_execute() is "lazy" in the sense that it
// immediately calls bulk_then_execute() on the outer executor, but bulk_sync_execute() is
// called on the inner executors eventually at some point in the future
// split shape into its outer and inner components
outer_shape_type outer_shape = this->outer_shape(shape);
inner_shape_type inner_shape = this->inner_shape(shape);
// this commented-out code expressed with two lambdas is functionally equivalent to what happens with the named
// functors below
// XXX avoid lambdas to workaround nvcc limitations as well as lack of polymorphic lambda in c++11
//return bulk_then_execute(outer_executor(), [=](const outer_index_type& outer_idx, auto&... outer_args)
//{
//  auto inner_executor_idx = select_inner_executor(outer_idx, outer_shape);
//  bulk_sync_execute_with_void_result(inner_executor(inner_executor_idx), [=](const inner_index_type& inner_idx, auto&... inner_args)
//  {
//    index_type idx = make_index(outer_idx, inner_idx);
//    f(idx, outer_args..., inner_args...);
//  });
//},
//outer_shape,
//predecessor,
//result_factory,
//outer_factory
//);
lazy_bulk_then_execute_functor<Function,InnerFactories...> execute_me{*this,outer_shape,inner_shape,f,agency::make_tuple(inner_factories...)};
detail::bulk_continuation_executor_adaptor<outer_executor_type> adapted_exec(outer_executor());
return adapted_exec.bulk_then_execute(execute_me, outer_shape, predecessor, result_factory, outer_factory);
}
// tag-dispatch target for the lazy strategy
template<class Function, class Future, class ResultFactory, class OuterFactory, class... InnerFactories>
__AGENCY_ANNOTATION
future<detail::result_of_t<ResultFactory()>>
bulk_then_execute_impl(lazy_strategy, Function f, shape_type shape, Future& predecessor, ResultFactory result_factory, OuterFactory outer_factory, InnerFactories... inner_factories)
{
return lazy_bulk_then_execute(f, shape, predecessor, result_factory, outer_factory, inner_factories...);
}
public:
// Outer-level functor for the eager strategy: for each outer index it
// immediately issues the inner portion via bulk_then_execute() on the
// selected inner executor and returns the resulting inner future.
// XXX make this functor public to accomodate nvcc's requirement
// on types used to instantiate __global__ function templates
template<class Function, class Futures, class Result, class OuterShared, class... Factories>
struct eager_bulk_then_execute_functor
{
executor_array& exec;
mutable Function f;
Futures& predecessor_futures;
Result* result_ptr;
OuterShared* outer_shared_arg_ptr;
tuple<Factories...> inner_factories;
outer_shape_type outer_shape;
inner_shape_type inner_shape;
struct inner_functor
{
mutable Function f;
outer_index_type outer_idx;
Result& result;
OuterShared& outer_arg;
// this overload is chosen when the future is void
// no additional past_arg will be passed to this function, so the number of Args
// is equal to the number of shared Factories
// unfortunately, this seems to be the most straightforward application of SFINAE
// to choose between these two cases
template<class... Args,
class = typename std::enable_if<
sizeof...(Args) == sizeof...(Factories)
>::type>
__AGENCY_ANNOTATION
void operator()(const inner_index_type& inner_idx, Args&... inner_shared_args) const
{
auto idx = make_index(outer_idx, inner_idx);
// when the predecessor future is void, there's no predecessor argument to pass to invoke()
agency::detail::invoke(f, idx, result, outer_arg, inner_shared_args...);
}
// this overload is chosen when the future is not void
// an additional past_arg will be passed to this function, so the number of Args
// unfortunately, this seems to be the most straightforward application of SFINAE
// to choose between these two cases
template<class Predecessor,
class... Args,
class = typename std::enable_if<
sizeof...(Args) == sizeof...(Factories)
>::type>
__AGENCY_ANNOTATION
void operator()(const inner_index_type& inner_idx, Predecessor& predecessor, Args&... inner_shared_args) const
{
auto idx = make_index(outer_idx, inner_idx);
// when predecessor is not void, we pass it to invoke in the slot before result
agency::detail::invoke(f, idx, predecessor, result, outer_arg, inner_shared_args...);
}
};
template<size_t... Indices>
__AGENCY_ANNOTATION
executor_future_t<inner_executor_type,void>
impl(detail::index_sequence<Indices...>, const outer_index_type& outer_idx) const
{
auto inner_executor_idx = exec.select_inner_executor(outer_idx, outer_shape);
detail::bulk_continuation_executor_adaptor<inner_executor_type> adapted_inner_executor(exec.inner_executor(inner_executor_idx));
return detail::bulk_then_execute_with_void_result(
adapted_inner_executor,
inner_functor{f,outer_idx,*result_ptr,*outer_shared_arg_ptr},
inner_shape,
predecessor_futures[outer_idx],
agency::get<Indices>(inner_factories)...
);
}
__AGENCY_ANNOTATION
executor_future_t<inner_executor_type,void>
operator()(const outer_index_type& outer_idx) const
{
return impl(detail::index_sequence_for<Factories...>(), outer_idx);
}
};
private:
template<class Function, class Future, class ResultFactory, class OuterFactory, class... InnerFactories>
__AGENCY_ANNOTATION
future<detail::result_of_t<ResultFactory()>>
eager_bulk_then_execute(Function f, shape_type shape, Future& predecessor, ResultFactory result_factory, OuterFactory outer_factory, InnerFactories... inner_factories)
{
// this implementation legal when the outer_category is not sequenced
// XXX and the inner executor's is concurrent with the launching agent
// i.e., we have to make sure that the inner call to bulk_then_execute() actually makes progress
// without having to call .get() on the returned futures
// separate the shape into inner and outer portions
auto outer_shape = this->outer_shape(shape);
auto inner_shape = this->inner_shape(shape);
// create the results via the result_factory
using result_type = decltype(result_factory());
auto results_ptr = detail::allocate_unique<result_type>(allocator<result_type>(), result_factory());
result_type* results_raw_ptr = results_ptr.get();
// create the outer shared argument via the outer_factory
using outer_shared_arg_type = decltype(outer_factory());
auto outer_shared_arg_ptr = detail::allocate_unique<outer_shared_arg_type>(allocator<outer_shared_arg_type>(), outer_factory());
outer_shared_arg_type* outer_shared_arg_raw_ptr = outer_shared_arg_ptr.get();
// split the predecessor future into a collection of shared futures
auto shared_predecessor_futures = detail::bulk_share_future(outer_executor(), outer_shape, predecessor);
using future_container = decltype(shared_predecessor_futures);
// XXX avoid lambdas to workaround nvcc limitations as well as lack of polymorphic lambda
//auto inner_futures = bulk_sync_execute_with_auto_result_and_without_shared_parameters(outer_executor(), [=,&past_futures](const outer_index_type& outer_idx) mutable
//{
//  auto inner_executor_idx = select_inner_executor(outer_idx, outer_shape);
//
//  using past_arg_type = detail::future_value_t<Future>;
//
//  return bulk_then_execute_with_void_result(inner_executor(inner_executor_idx), [=](const inner_index_type& inner_idx, past_arg_type& past_arg, decltype(inner_factories())&... inner_shared_args) mutable
//  {
//    auto idx = make_index(outer_idx, inner_idx);
//    (*results_raw_ptr)[idx] = agency::detail::invoke(f, idx, past_arg, *outer_shared_arg_raw_ptr, inner_shared_args...);
//  },
//  inner_shape,
//  past_futures[outer_idx],
//  inner_factories...);
//},
//outer_shape);
auto functor = eager_bulk_then_execute_functor<Function,future_container,result_type,outer_shared_arg_type,InnerFactories...>{*this, f, shared_predecessor_futures, results_raw_ptr, outer_shared_arg_raw_ptr, agency::make_tuple(inner_factories...), outer_shape, inner_shape};
auto inner_futures = detail::bulk_sync_execute_with_auto_result_and_without_shared_parameters(outer_executor(), functor, outer_shape);
// create a continuation to synchronize the futures and return the result
auto continuation = make_wait_for_futures_and_move_result(std::move(inner_futures), std::move(results_ptr), std::move(outer_shared_arg_ptr));
// async_execute() with the outer executor to launch the continuation
return agency::async_execute(outer_executor(), std::move(continuation));
}
// tag-dispatch target for the eager strategy
template<class Function, class Future, class ResultFactory, class OuterFactory, class... InnerFactories>
__AGENCY_ANNOTATION
future<detail::result_of_t<ResultFactory()>>
bulk_then_execute_impl(eager_strategy, Function f, shape_type shape, Future& predecessor, ResultFactory result_factory, OuterFactory outer_factory, InnerFactories... inner_factories)
{
return eager_bulk_then_execute(f, shape, predecessor, result_factory, outer_factory, inner_factories...);
}
public:
// Issues a two-level bulk continuation: one inner factory is required per
// inner execution level (enforced by the __AGENCY_REQUIRES constraint).
template<class Function, class Future, class ResultFactory, class OuterFactory, class... InnerFactories,
__AGENCY_REQUIRES(sizeof...(InnerFactories) == inner_depth)
>
__AGENCY_ANNOTATION
future<detail::result_of_t<ResultFactory()>>
bulk_then_execute(Function f, shape_type shape, Future& predecessor, ResultFactory result_factory, OuterFactory outer_factory, InnerFactories... inner_factories)
{
// tag dispatch the appropriate implementation strategy for bulk_then_execute() using this first parameter
return bulk_then_execute_impl(bulk_then_execute_implementation_strategy(), f, shape, predecessor, result_factory, outer_factory, inner_factories...);
}
private:
outer_executor_type outer_executor_;
// the array of inner executors, stored in a 1-dimensional ndarray
experimental::ndarray<inner_executor_type, 1, allocator<inner_executor_type>> inner_executors_;
// choose lazy when either level is sequenced (the eager strategy assumes
// the issued inner work makes progress without being waited on)
using bulk_then_execute_implementation_strategy = typename std::conditional<
detail::disjunction<
std::is_same<outer_execution_category, sequenced_execution_tag>,
std::is_same<inner_execution_category, sequenced_execution_tag> // XXX this should really check whether the inner executor's async_execute() method executes concurrently with the caller
>::value,
lazy_strategy,
eager_strategy
>::type;
// XXX eliminate this when we eliminate .then_execute()
using then_execute_implementation_strategy = bulk_then_execute_implementation_strategy;
public:
// iterator access to the inner executor array
__AGENCY_ANNOTATION
auto begin() ->
decltype(inner_executors_.begin())
{
return inner_executors_.begin();
}
__AGENCY_ANNOTATION
auto begin() const ->
decltype(inner_executors_.cbegin())
{
return inner_executors_.cbegin();
}
__AGENCY_ANNOTATION
auto end() ->
decltype(inner_executors_.end())
{
return inner_executors_.end();
}
__AGENCY_ANNOTATION
auto end() const ->
decltype(inner_executors_.cend())
{
return inner_executors_.cend();
}
// number of inner executors in the array
__AGENCY_ANNOTATION
size_t size() const
{
return inner_executors_.size();
}
// NOTE(review): the unit shape queries only inner_executor(0); presumably
// all inner executors are assumed to report the same unit shape — confirm
__AGENCY_ANNOTATION
shape_type unit_shape() const
{
auto outer_exec_shape = size() * agency::unit_shape(outer_executor());
auto inner_exec_shape = agency::unit_shape(inner_executor(0));
return make_shape(outer_exec_shape, inner_exec_shape);
}
__AGENCY_ANNOTATION
shape_type max_shape_dimensions() const
{
// XXX might want to multiply shape() * max_shape_dimensions(outer_executor())
auto outer_max_shape = agency::max_shape_dimensions(outer_executor());
auto inner_max_shape = agency::max_shape_dimensions(inner_executor(0));
return make_shape(outer_max_shape, inner_max_shape);
}
__AGENCY_ANNOTATION
outer_executor_type& outer_executor()
{
return outer_executor_;
}
__AGENCY_ANNOTATION
const outer_executor_type& outer_executor() const
{
return outer_executor_;
}
__AGENCY_ANNOTATION
inner_executor_type& inner_executor(size_t i)
{
return begin()[i];
}
__AGENCY_ANNOTATION
const inner_executor_type& inner_executor(size_t i) const
{
return begin()[i];
}
__AGENCY_ANNOTATION
inner_executor_type& operator[](size_t i)
{
return inner_executors_[i];
}
__AGENCY_ANNOTATION
const inner_executor_type& operator[](size_t i) const
{
return inner_executors_[i];
}
};
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <cstddef>
#include <type_traits>
namespace agency
{
namespace detail
{
// Reports an allocation failure in a way that is valid in both host and
// device code: on the device (where C++ exceptions are unavailable) it prints
// a message and traps via assert(0); on the host it throws std::bad_alloc.
__AGENCY_ANNOTATION
inline void throw_bad_alloc()
{
#ifdef __CUDA_ARCH__
printf("bad_alloc");
assert(0);
#else
throw std::bad_alloc();
#endif
}
} // end detail
// A minimal stateless allocator whose members are all annotated with
// __AGENCY_ANNOTATION so it can be used from both host and device code.
// Interface-compatible with std::allocator<T>, but routes allocation failure
// through detail::throw_bad_alloc() above.
template<class T>
struct allocator
{
using value_type = T;
// stateless: any instance may free what another allocated, so containers
// may freely propagate/interchange these allocators
using propagate_on_container_move_assignment = std::true_type;
using is_always_equal = std::true_type;
__AGENCY_ANNOTATION
allocator() = default;
__AGENCY_ANNOTATION
allocator(const allocator&) = default;
// converting constructor required for allocator rebinding
template<class U>
__AGENCY_ANNOTATION
allocator(const allocator<U>&){}
__AGENCY_ANNOTATION
~allocator(){}
// Allocates uninitialized storage for n objects of type T.
// NOTE(review): host-side ::operator new throws on failure rather than
// returning nullptr, so the null check below presumably only fires in
// device code, where operator new can return nullptr — confirm.
__AGENCY_ANNOTATION
T* allocate(std::size_t n)
{
T* result = static_cast<T*>(::operator new(sizeof(value_type) * n));
if(result == nullptr)
{
detail::throw_bad_alloc();
}
return result;
}
// Deallocates storage previously returned by allocate(); the size argument
// is not needed by ::operator delete and is ignored.
__AGENCY_ANNOTATION
void deallocate(T* p, std::size_t)
{
::operator delete(p);
}
};
// agency::allocator is stateless, so any two instances compare equal,
// regardless of their value_types.
template<class T1, class T2>
__AGENCY_ANNOTATION
bool operator==(const allocator<T1>&, const allocator<T2>&)
{
return true;
}
// complement of operator== above: stateless allocators are never unequal
template<class T1, class T2>
__AGENCY_ANNOTATION
bool operator!=(const allocator<T1>&, const allocator<T2>&)
{
return false;
}
} // end agency
<file_sep>/// \file
/// \brief Include this file to use any component of Agency which is not experimental and which requires
/// CUDA C++ language extensions.
///
/// Including `<agency/cuda.hpp>` recursively includes most of the header files organized beneath
/// `<agency/cuda/*>`. It is provided for quick access to most of Agency's CUDA features. Features not included
/// by this header, but which are organized beneath `<agency/cuda/*>` are considered experimental.
///
/// Specifically, `<agency/cuda.hpp>` provides definitions for all entities inside the `agency::cuda` namespace,
/// except for `agency::cuda::experimental`.
///
#pragma once
#include <agency/detail/config.hpp>
#include <agency/cuda/algorithm.hpp>
#include <agency/cuda/device.hpp>
#include <agency/cuda/future.hpp>
#include <agency/cuda/execution.hpp>
#include <agency/cuda/memory.hpp>
/// \namespace agency::cuda
/// \brief `agency::cuda` is the namespace which contains CUDA-specific functionality.
///
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/future.hpp>
#include <agency/detail/control_structures/shared_parameter.hpp>
#include <type_traits>
namespace agency
{
namespace detail
{
// this type trait computes the type of the parameter passed to a user function
// given then type of parameter passed to bulk_invoke/bulk_async/etc.
// parameters are passed by value unless they are special parameters like
// shared parameters. These are passed by reference.
template<class T>
struct decay_parameter
{
// given a trait U with a nested ::type, produce an lvalue reference to it
// without eagerly instantiating U::type
template<class U>
struct lazy_add_lvalue_reference
{
using type = typename std::add_lvalue_reference<typename U::type>::type;
};
// first decay the parameter
using decayed_type = typename std::decay<T>::type;
// when passing a parameter to the user's function:
// if the parameter is a future, then we pass a reference to its value type
// otherwise, we pass a copy of the decayed_type
using type = typename detail::lazy_conditional<
is_future<decayed_type>::value,
lazy_add_lvalue_reference<future_value<decayed_type>>,
identity<decayed_type>
>::type;
};
// convenience alias for decay_parameter<T>::type
template<class T>
using decay_parameter_t = typename decay_parameter<T>::type;
// specialization for shared parameters (see shared_parameter.hpp)
template<size_t level, class Factory>
struct decay_parameter<shared_parameter<level,Factory>>
{
// shared_parameters are passed to the user function by reference
using type = typename shared_parameter<level,Factory>::value_type &;
};
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/execution/executor/concurrent_executor.hpp>
#include <agency/execution/executor/customization_points.hpp>
#include <agency/execution/executor/executor_array.hpp>
#include <agency/execution/executor/flattened_executor.hpp>
#include <agency/execution/executor/executor_traits.hpp>
#include <agency/execution/executor/parallel_executor.hpp>
#include <agency/execution/executor/scoped_executor.hpp>
#include <agency/execution/executor/sequenced_executor.hpp>
#include <agency/execution/executor/unsequenced_executor.hpp>
#include <agency/execution/executor/variant_executor.hpp>
#include <agency/execution/executor/vector_executor.hpp>
<file_sep>#include <iostream>
#include <type_traits>
#include <vector>
#include <cassert>
#include <agency/execution/executor/scoped_executor.hpp>
#include <agency/execution/executor/flattened_executor.hpp>
#include <agency/execution/executor/executor_traits.hpp>
#include <agency/execution/executor/customization_points.hpp>
#include "test_executors.hpp"
// Exercises flattened_executor<scoped_executor<Outer,Inner>>: first checks
// its static executor traits, then runs bulk_then_execute() over 10 agents,
// each of which adds a past value (7) to a per-agent shared value (13), and
// finally verifies every element of the returned result vector.
template<class OuterExecutor, class InnerExecutor>
void test(OuterExecutor outer_exec, InnerExecutor inner_exec)
{
using namespace agency;
using scoped_executor_type = scoped_executor<OuterExecutor,InnerExecutor>;
using flattened_executor_type = flattened_executor<scoped_executor_type>;
static_assert(is_bulk_continuation_executor<flattened_executor_type>::value,
"flattened_executor should be a bulk continuation executor");
static_assert(detail::is_detected_exact<size_t, executor_shape_t, flattened_executor_type>::value,
"flattened_executor should have size_t shape_type");
static_assert(detail::is_detected_exact<size_t, executor_index_t, flattened_executor_type>::value,
"flattened_executor should have size_t index_type");
static_assert(detail::is_detected_exact<executor_future_t<OuterExecutor,int>, executor_future_t, flattened_executor_type, int>::value,
"flattened_executor should have the same future type as OuterExecutor");
const size_t scoped_depth = executor_execution_depth<scoped_executor_type>::value;
static_assert(executor_execution_depth<flattened_executor_type>::value == scoped_depth - 1,
"flattened_executor should have execution_depth == scoped_depth - 1");
flattened_executor_type exec(scoped_executor_type(outer_exec,inner_exec));
// predecessor future carrying the "past" value 7
std::future<int> fut = make_ready_future<int>(exec, 7);
using shape_type = executor_shape_t<flattened_executor_type>;
shape_type shape(10);
using index_type = executor_index_t<flattened_executor_type>;
auto f = exec.bulk_then_execute(
[=](index_type idx, int& past_arg, std::vector<int>& results, std::vector<int>& shared_arg)
{
results[idx] = past_arg + shared_arg[idx];
},
shape,
fut,
[=]{ return std::vector<int>(shape); }, // results: `shape` zero-initialized ints
[=]{ return std::vector<int>(shape, 13); } // shared_arg: `shape` copies of 13
);
auto result = f.get();
// every agent should have computed 7 + 13
assert(std::vector<int>(shape, 7 + 13) == result);
}
// Runs the test over every pairing of the three executor kinds provided by
// test_executors.hpp (continuation, synchronous, asynchronous).
int main()
{
test(bulk_continuation_executor(), bulk_continuation_executor());
test(bulk_continuation_executor(), bulk_synchronous_executor());
test(bulk_continuation_executor(), bulk_asynchronous_executor());
test(bulk_synchronous_executor(), bulk_continuation_executor());
test(bulk_synchronous_executor(), bulk_synchronous_executor());
test(bulk_synchronous_executor(), bulk_asynchronous_executor());
test(bulk_asynchronous_executor(), bulk_continuation_executor());
test(bulk_asynchronous_executor(), bulk_synchronous_executor());
test(bulk_asynchronous_executor(), bulk_asynchronous_executor());
std::cout << "OK" << std::endl;
return 0;
}
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/type_traits.hpp>
#include <agency/execution/execution_categories.hpp>
#include <agency/execution/execution_policy.hpp>
namespace agency
{
namespace omp
{
// Executor which runs bulk work with an OpenMP "parallel for" loop.
class parallel_for_executor
{
public:
using execution_category = parallel_execution_tag;
// Invokes f(i, result, shared) for each i in [0, n), distributed across
// OpenMP threads, then returns the object created by result_factory.
// NOTE(review): the same result/shared objects are passed to f from
// multiple threads concurrently; callers are expected to make those
// accesses safe — confirm against call sites.
template<class Function, class ResultFactory, class SharedFactory>
agency::detail::result_of_t<ResultFactory()>
bulk_sync_execute(Function f, size_t n, ResultFactory result_factory, SharedFactory shared_factory)
{
#ifndef _OPENMP
// fails at compile time (only when this template is instantiated) if
// OpenMP support was not enabled
static_assert(sizeof(Function) && false, "agency::omp::parallel_for_executor requires C++ OpenMP language extensions (typically enabled with -fopenmp or /openmp).");
#endif
auto result = result_factory();
auto shared_parm = shared_factory();
#pragma omp parallel for
for(size_t i = 0; i < n; ++i)
{
f(i, result, shared_parm);
}
return std::move(result);
}
};
// canonical name used by the execution policies below
using parallel_executor = parallel_for_executor;
// Executor which runs bulk work with an OpenMP "simd" loop (vectorized,
// unsequenced within a single thread).
class simd_executor
{
public:
using execution_category = unsequenced_execution_tag;
// Invokes f(i, result, shared) for each i in [0, n) in an OpenMP simd
// loop, then returns the object created by result_factory.
template<class Function, class ResultFactory, class SharedFactory>
agency::detail::result_of_t<ResultFactory()>
bulk_sync_execute(Function f, size_t n, ResultFactory result_factory, SharedFactory shared_factory)
{
#if _OPENMP < 201307
// the simd construct was introduced in OpenMP 4.0 (version macro 201307)
static_assert(sizeof(Function) && false, "agency::omp::simd_executor requires C++ OpenMP 4.0 or better language extensions (typically enabled with -fopenmp or /openmp).");
#endif
auto result = result_factory();
auto shared_parm = shared_factory();
#pragma omp simd
for(size_t i = 0; i < n; ++i)
{
f(i, result, shared_parm);
}
return std::move(result);
}
};
// canonical name used by the execution policies below
using unsequenced_executor = simd_executor;
// Execution policy whose agents run on omp::parallel_executor; the OpenMP
// analogue of agency's generic parallel policy.
class parallel_execution_policy : public basic_execution_policy<parallel_agent, omp::parallel_executor, parallel_execution_policy>
{
private:
using super_t = basic_execution_policy<parallel_agent, omp::parallel_executor, parallel_execution_policy>;
public:
// inherit the base class's constructors
using super_t::basic_execution_policy;
};
// policy object analogous to agency::par, but backed by OpenMP
const parallel_execution_policy par{};
// Execution policy whose agents run on omp::unsequenced_executor (simd); the
// OpenMP analogue of agency's generic unsequenced policy.
class unsequenced_execution_policy : public basic_execution_policy<unsequenced_agent, omp::unsequenced_executor, unsequenced_execution_policy>
{
private:
using super_t = basic_execution_policy<unsequenced_agent, omp::unsequenced_executor, unsequenced_execution_policy>;
public:
// inherit the base class's constructors
using super_t::basic_execution_policy;
};
// policy object analogous to agency::unseq, but backed by OpenMP
const unsequenced_execution_policy unseq{};
} // end omp
} // end agency
<file_sep>#include <cassert>
#include <agency/future/always_ready_future.hpp>
#include <agency/future/future_traits.hpp>
#include <iostream>
// Unit tests for always_ready_future<T>: make_ready construction, move
// construction/assignment (the moved-from future must become invalid), and
// .then() continuations for every int/void input/output combination. For
// always_ready_future, continuations run immediately inside .then(), which
// is what the continuation_executed flags verify.
int main()
{
using namespace agency;
static_assert(agency::is_future<always_ready_future<int>>::value, "always_ready_future<int> is not a future");
{
// make_ready int
always_ready_future<int> f0 = always_ready_future<int>::make_ready(13);
assert(f0.valid());
assert(f0.get() == 13);
}
{
// make_ready void
always_ready_future<void> f0 = always_ready_future<void>::make_ready();
assert(f0.valid());
}
{
// move construct int: source becomes invalid, target holds the value
always_ready_future<int> f0 = always_ready_future<int>::make_ready(13);
assert(f0.valid());
always_ready_future<int> f1 = std::move(f0);
assert(!f0.valid());
assert(f1.valid());
assert(f1.get() == 13);
}
{
// move construct void
always_ready_future<void> f0 = always_ready_future<void>::make_ready();
assert(f0.valid());
always_ready_future<void> f1 = std::move(f0);
assert(!f0.valid());
assert(f1.valid());
}
{
// move assign int: assigned-to future's old value (7) is replaced
always_ready_future<int> f1 = always_ready_future<int>::make_ready(13);
assert(f1.valid());
always_ready_future<int> f2(7);
assert(f2.valid());
f2 = std::move(f1);
assert(!f1.valid());
assert(f2.valid());
assert(f2.get() == 13);
}
{
// move assign void
always_ready_future<void> f1 = always_ready_future<void>::make_ready();
assert(f1.valid());
always_ready_future<void> f2;
assert(f2.valid());
f2 = std::move(f1);
assert(!f1.valid());
assert(f2.valid());
}
{
// then int -> int
auto f1 = always_ready_future<int>(7);
bool continuation_executed = false;
auto f2 = f1.then([&](int& x)
{
continuation_executed = true;
return x + 13;
});
// the continuation runs inside .then() itself
assert(continuation_executed);
assert(!f1.valid());
assert(f2.valid());
int result = f2.get();
// get() consumes the future
assert(!f2.valid());
assert(result == 7 + 13);
}
{
// then int -> void
auto f1 = always_ready_future<int>(13);
bool continuation_executed = false;
auto f2 = f1.then([&](int &x)
{
continuation_executed = true;
});
assert(continuation_executed);
assert(!f1.valid());
assert(f2.valid());
f2.get();
assert(!f2.valid());
}
{
// then void -> int
always_ready_future<void> f1;
bool continuation_executed = false;
auto f2 = f1.then([&]()
{
continuation_executed = true;
return 7;
});
assert(continuation_executed);
assert(!f1.valid());
assert(f2.valid());
int result = f2.get();
assert(!f2.valid());
assert(result == 7);
}
{
// then void -> void
always_ready_future<void> f1;
bool continuation_executed = false;
auto f2 = f1.then([&]()
{
continuation_executed = true;
});
assert(continuation_executed);
assert(!f1.valid());
assert(f2.valid());
f2.get();
assert(!f2.valid());
}
std::cout << "OK" << std::endl;
return 0;
}
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/type_traits.hpp>
#include <agency/exception_list.hpp>
#include <agency/tuple.hpp>
#include <agency/detail/tuple/tuple_utility.hpp>
#include <agency/future/future_traits/future_rebind_value.hpp>
#include <agency/detail/has_member.hpp>
#include <future>
#include <utility>
namespace agency
{
namespace detail
{
// Returns a std::future<T> which is immediately ready, holding a T
// constructed in place from args. Participates in overload resolution
// only when T is not void.
template<class T, class... Args,
         class = typename std::enable_if<
           !std::is_void<T>::value
         >::type>
std::future<T> make_ready_future(Args&&... args)
{
  std::promise<T> ready_promise;
  ready_promise.set_value(T(std::forward<Args>(args)...));
  return ready_promise.get_future();
}
// Overload for T = void: returns an immediately ready std::future<void>.
// The Args pack exists only to mirror the non-void overload's shape for
// SFINAE; the function itself takes no arguments.
template<class T, class... Args,
         class = typename std::enable_if<
           std::is_void<T>::value
         >::type>
std::future<T> make_ready_future()
{
  std::promise<T> void_promise;
  void_promise.set_value();
  return void_promise.get_future();
}
// Returns a ready future holding the (decayed copy of the) given value.
//
// Fix: the promise must be instantiated with the *decayed* type. T is
// deduced from a forwarding reference, so for an lvalue argument T = U&
// and std::promise<U&>::get_future() yields std::future<U&>, which does
// not convert to the declared return type std::future<std::decay<T>>.
// (std::decay here is equivalent to the file's detail::decay_t alias.)
template<class T>
std::future<typename std::decay<T>::type> make_ready_future(T&& value)
{
  using value_type = typename std::decay<T>::type;
  std::promise<value_type> p;
  p.set_value(std::forward<T>(value));
  return p.get_future();
}
// Zero-argument convenience overload: an immediately ready std::future<void>.
inline std::future<void> make_ready_future()
{
  std::promise<void> void_promise;
  void_promise.set_value();
  return void_promise.get_future();
}
// make_ready_shared_future: same family as make_ready_future above, but
// returns a std::shared_future obtained by sharing the ready std::future.

// non-void T, constructed in place from args
template<class T, class... Args,
         class = typename std::enable_if<
           !std::is_void<T>::value
         >::type>
std::shared_future<T> make_ready_shared_future(Args&&... args)
{
  return detail::make_ready_future<T>(std::forward<Args>(args)...).share();
}

// T = void (the Args pack mirrors the non-void overload for SFINAE only)
template<class T, class... Args,
         class = typename std::enable_if<
           std::is_void<T>::value
         >::type>
std::shared_future<T> make_ready_shared_future()
{
  return detail::make_ready_future<T>().share();
}

// value type deduced from a single argument (decayed)
template<class T>
std::shared_future<decay_t<T>> make_ready_shared_future(T&& value)
{
  return detail::make_ready_future(std::forward<T>(value)).share();
}

// zero-argument convenience overload for void
inline std::shared_future<void> make_ready_shared_future()
{
  return detail::make_ready_future().share();
}
// XXX when_all is supposed to return a future<vector>
// Joins the futures in [first, last): any exceptions they hold are collected
// into a single exception_list. The returned void future is ready when no
// task failed, and otherwise holds the exception_list.
template<typename ForwardIterator>
std::future<void> when_all(ForwardIterator first, ForwardIterator last)
{
  std::promise<void> combined;

  exception_list exceptions = flatten_into_exception_list(first, last);
  if(exceptions.size() == 0)
  {
    combined.set_value();
  }
  else
  {
    combined.set_exception(std::make_exception_ptr(exceptions));
  }

  return combined.get_future();
}
// then() with launch policy for std::future
// Attaches continuation f to fut. f receives the (already satisfied) future
// itself by reference. fut is moved into the spawned task, so the caller's
// handle is left invalid.
template<class T, class Function>
std::future<detail::result_of_t<Function(std::future<T>&)>>
  then(std::future<T>& fut, std::launch policy, Function&& f)
{
  return std::async(policy, [](std::future<T>&& fut, Function&& f)
  {
    // block until the antecedent is ready, then hand the whole future to f
    fut.wait();
    return std::forward<Function>(f)(fut);
  },
  std::move(fut),
  std::forward<Function>(f)
  );
}

// monadic_then(): like then(), but unwraps the future and passes its value
// to f instead of the future itself.
template<class T, class Function>
std::future<detail::result_of_t<Function(T&)>>
  monadic_then(std::future<T>& fut, std::launch policy, Function&& f)
{
  return std::async(policy, [](std::future<T>&& fut, Function&& f)
  {
    // NOTE(review): the value is copied/moved into a local; f observes an
    // lvalue whose lifetime ends when the task returns
    T arg = fut.get();
    return std::forward<Function>(f)(arg);
  },
  std::move(fut),
  std::forward<Function>(f)
  );
}

// void specialization of monadic_then(): f is invoked with no arguments;
// fut.get() is still called to propagate any stored exception
template<class Function>
std::future<detail::result_of_t<Function()>>
  monadic_then(std::future<void>& fut, std::launch policy, Function&& f)
{
  return std::async(policy, [](std::future<void>&& fut, Function&& f)
  {
    fut.get();
    return std::forward<Function>(f)();
  },
  std::move(fut),
  std::forward<Function>(f)
  );
}
// then() for std::future
// Convenience overload: leaves the launch-policy choice to the runtime
// (async | deferred), matching std::async's own default.
template<class T, class Function>
std::future<detail::result_of_t<Function(std::future<T>&)>>
  then(std::future<T>& fut, Function&& f)
{
  const std::launch default_policy = std::launch::async | std::launch::deferred;
  return detail::then(fut, default_policy, std::forward<Function>(f));
}
// Convenience overload of monadic_then() using std::async's default policy.
template<class T, class Function>
auto monadic_then(std::future<T>& fut, Function&& f) ->
  decltype(detail::monadic_then(fut, std::launch::async | std::launch::deferred, std::forward<Function>(f)))
{
  const std::launch default_policy = std::launch::async | std::launch::deferred;
  return detail::monadic_then(fut, default_policy, std::forward<Function>(f));
}
// then() with launch policy for std::shared_future
// The shared_future handle is moved into the spawned task: the caller's
// handle becomes invalid, but other copies of the shared state (if any)
// are unaffected.
template<class T, class Function>
std::future<detail::result_of_t<Function(std::shared_future<T>&)>>
  then(std::shared_future<T>& fut, std::launch policy, Function&& f)
{
  return std::async(policy, [](std::shared_future<T>&& fut, Function&& f)
  {
    fut.wait();
    return std::forward<Function>(f)(fut);
  },
  std::move(fut),
  std::forward<Function>(f)
  );
}

// then() for std::shared_future
// convenience overload using std::async's default policy
template<class T, class Function>
std::future<detail::result_of_t<Function(std::shared_future<T>&)>>
  then(std::shared_future<T>& fut, Function&& f)
{
  return detail::then(fut, std::launch::async | std::launch::deferred, std::forward<Function>(f));
}

// monadic_then() for std::shared_future
// unwraps the future and passes its value to f
template<class T, class Function>
std::future<detail::result_of_t<Function(T&)>>
  monadic_then(std::shared_future<T>& fut, std::launch policy, Function&& f)
{
  return std::async(policy, [](std::shared_future<T>&& fut, Function&& f)
  {
    // NOTE(review): shared_future::get() returns a const reference; this
    // const_cast hands the continuation a mutable reference into the shared
    // state. If other owners of the shared state read the value concurrently
    // this is a data race -- confirm callers guarantee exclusive access.
    T& arg = const_cast<T&>(fut.get());
    return std::forward<Function>(f)(arg);
  },
  std::move(fut),
  std::forward<Function>(f)
  );
}

// void specialization: f is invoked with no arguments; fut.get() is still
// called to propagate any stored exception
template<class Function>
std::future<detail::result_of_t<Function()>>
  monadic_then(std::shared_future<void>& fut, std::launch policy, Function&& f)
{
  return std::async(policy, [](std::shared_future<void>&& fut, Function&& f)
  {
    fut.get();
    return std::forward<Function>(f)();
  },
  std::move(fut),
  std::forward<Function>(f)
  );
}

// convenience overload of monadic_then() using std::async's default policy
template<class T, class Function>
auto monadic_then(std::shared_future<T>& fut, Function&& f) ->
  decltype(detail::monadic_then(fut, std::launch::async | std::launch::deferred, std::forward<Function>(f)))
{
  return detail::monadic_then(fut, std::launch::async | std::launch::deferred, std::forward<Function>(f));
}
// generates the trait has_value_type<T>: true when T declares a nested value_type
__DEFINE_HAS_MEMBER_TYPE(has_value_type, value_type);

// yields the value type a future produces, based on the type of .get()
template<class Future>
struct future_value
{
  // the decay removes the reference returned
  // from futures like shared_future
  // the idea is given Future<T>,
  // future_value<Future<T>> returns T
  using type = typename std::decay<
    decltype(std::declval<Future>().get())
  >::type;
};

// convenience alias: future_value_t<Future<T>> is T
template<class Future>
using future_value_t = typename future_value<Future>::type;
namespace is_future_detail
{

// detects the presence of a member function .wait()
template<class T>
struct has_wait_impl
{
  template<class Future,
           class = decltype(
             std::declval<Future>().wait()
           )>
  static std::true_type test(int);

  template<class>
  static std::false_type test(...);

  using type = decltype(test<T>(0));
};

template<class T>
using has_wait = typename has_wait_impl<T>::type;

// detects the presence of a member function .get()
template<class T>
struct has_get_impl
{
  template<class Future,
           class = decltype(std::declval<Future>().get())
  >
  static std::true_type test(int);

  template<class>
  static std::false_type test(...);

  using type = decltype(test<T>(0));
};

template<class T>
using has_get = typename has_get_impl<T>::type;

} // end is_future_detail
// a "future", for the purposes of this library, is anything that has
// both a .wait() and a .get() member function
template<class T>
struct is_future
  : std::integral_constant<
      bool,
      is_future_detail::has_wait<T>::value && is_future_detail::has_get<T>::value
    >
{};
// true when T is a future whose value type is not void
template<class T>
struct is_non_void_future_impl
{
  template<class U,
           class = typename std::enable_if<
             is_future<U>::value
           >::type,
           class = typename std::enable_if<
             !std::is_void<future_value_t<U>>::value
           >::type
          >
  static std::true_type test(int);

  template<class>
  static std::false_type test(...);

  using type = decltype(test<T>(0));
};

template<class T>
struct is_non_void_future
  : is_non_void_future_impl<T>::type
{};

// true when T is a future whose value type is void
template<class T>
struct is_void_future_impl
{
  template<class U,
           class = typename std::enable_if<
             is_future<U>::value
           >::type,
           class = typename std::enable_if<
             std::is_void<future_value_t<U>>::value
           >::type
          >
  static std::true_type test(int);

  template<class>
  static std::false_type test(...);

  using type = decltype(test<T>(0));
};

template<class T>
struct is_void_future
  : is_void_future_impl<T>::type
{};
// true when T is a future and is exactly an instance of the given future
// template, i.e. T == Future<future_value<T>>
template<class T, template<class> class Future, class Enable = void>
struct is_instance_of_future : std::false_type {};

template<class T, template<class> class Future>
struct is_instance_of_future<T,Future,
  typename std::enable_if<
    is_future<T>::value
  >::type
> : std::is_same<
  T,
  Future<
    typename future_value<T>::type
  >
>
{};
// figure out whether a function is callable as a continuation given a list of parameters
// Each FutureOrT that is a future is unwrapped to an lvalue reference to its
// value type; voids are dropped; non-futures are passed through as lvalue
// references.
// NOTE(review): this machinery is duplicated nearly verbatim in
// result_of_continuation below (only the final type_list_* metafunction
// differs) -- a shared helper would keep the two in sync.
template<class Function, class... FutureOrT>
struct is_callable_continuation_impl
{
  using types = type_list<FutureOrT...>;

  template<class T>
  struct lazy_add_lvalue_reference
  {
    using type = typename std::add_lvalue_reference<typename T::type>::type;
  };

  template<class T>
  struct map_futures_to_lvalue_reference_to_value_type
    : lazy_conditional<
        is_future<T>::value,
        lazy_add_lvalue_reference<future_value<T>>,
        identity<T>
      >
  {};

  // turn futures into lvalue references to their values
  using value_types = type_list_map<map_futures_to_lvalue_reference_to_value_type,types>;

  template<class T>
  struct is_not_void : std::integral_constant<bool, !std::is_void<T>::value> {};

  // filter out void
  using non_void_value_types = type_list_filter<is_not_void,value_types>;

  // add lvalue reference
  using references = type_list_map<std::add_lvalue_reference,non_void_value_types>;

  // get the type of the result of applying the Function to the references
  using type = typename type_list_is_callable<Function, references>::type;
};

template<class Function, class... FutureOrT>
using is_callable_continuation = typename is_callable_continuation_impl<Function,FutureOrT...>::type;

// figure out the result of applying Function to a list of parameters
// when a parameter is a future, it gets unwrapped into an lvalue of its value_type
template<class Function, class... FutureOrT>
struct result_of_continuation
{
  using types = type_list<FutureOrT...>;

  template<class T>
  struct lazy_add_lvalue_reference
  {
    using type = typename std::add_lvalue_reference<typename T::type>::type;
  };

  template<class T>
  struct map_futures_to_lvalue_reference_to_value_type
    : lazy_conditional<
        is_future<T>::value,
        lazy_add_lvalue_reference<future_value<T>>,
        identity<T>
      >
  {};

  // turn futures into lvalue references to their values
  using value_types = type_list_map<map_futures_to_lvalue_reference_to_value_type,types>;

  template<class T>
  struct is_not_void : std::integral_constant<bool, !std::is_void<T>::value> {};

  // filter out void
  using non_void_value_types = type_list_filter<is_not_void,value_types>;

  // add lvalue reference
  using references = type_list_map<std::add_lvalue_reference,non_void_value_types>;

  // get the type of the result of applying the Function to the references
  using type = typename type_list_result_of<Function, references>::type;
};

template<class Function, class... FutureOrT>
using result_of_continuation_t = typename result_of_continuation<Function,FutureOrT...>::type;
// Detects whether Future has a member then() invocable with a Function.
//
// Fix: the probe expression must depend on the *local* template parameter
// Future2, not the enclosing Future. With the enclosing parameter, the
// expression is not in the immediate context of test's deduction, so a
// missing then() produced a hard compile error instead of selecting the
// false_type overload (compare has_share_impl below, which uses its local
// parameter correctly).
template<class Future, class Function>
struct has_then_impl
{
  template<
    class Future2,
    typename = decltype(
      std::declval<Future2*>()->then(
        std::declval<Function>()
      )
    )
  >
  static std::true_type test(int);

  template<class>
  static std::false_type test(...);

  using type = decltype(test<Future>(0));
};

template<class Future, class Function>
using has_then = typename has_then_impl<Future,Function>::type;
// Detects whether FromFuture provides a static member function template
// cast<ToType>() taking the future and returning exactly ToFuture.
//
// Fix: the original probe expression was ill-formed --
// *std::declval<FromFuture1*>.template cast<ToType>() never *called*
// declval (missing parentheses), so the trait always reported false and a
// future's own cast() was never used. The probe now mirrors the way
// future_traits actually invokes the member:
//   future_type::template cast<U>(fut)
template<class FromFuture, class ToType, class ToFuture>
struct has_cast_impl
{
  template<
    class FromFuture1,
    class Result = decltype(
      FromFuture1::template cast<ToType>(std::declval<FromFuture1&>())
    ),
    class = typename std::enable_if<
      std::is_same<Result,ToFuture>::value
    >::type
  >
  static std::true_type test(int);

  template<class>
  static std::false_type test(...);

  using type = decltype(test<FromFuture>(0));
};

template<class FromFuture, class ToType, class ToFuture>
using has_cast = typename has_cast_impl<FromFuture,ToType,ToFuture>::type;
// continuation functor used by future_traits::cast: converts a future's
// value to T by moving it through a static_cast
template<class T>
struct move_and_cast_functor
{
  template<class U>
  __AGENCY_ANNOTATION
  T operator()(U& val) const
  {
    // XXX we should probably distinguish between two cases:
    //     1. we are (moving and) casting from a unique future
    //     2. we are casting from a shared future
    //
    //     The shared future cast should not do a move,
    //     and perhaps it should only be allowed to cast to a reference type

    // to do this cast, first cast val to a T&, then move the T
    return static_cast<T>(std::move(val));
  }
};
// detects whether Future has a member .share() whose result is itself a future
template<class Future>
struct has_share_impl
{
  template<
    class Future1,
    class Result = decltype(
      std::declval<Future1>().share()
    ),
    class = typename std::enable_if<
      is_future<Result>::value
    >::type
  >
  static std::true_type test(int);

  template<class>
  static std::false_type test(...);

  using type = decltype(test<Future>(0));
};

template<class Future>
using has_share = typename has_share_impl<Future>::type;
} // end detail
// future_traits: uniform interface over arbitrary future types.
// Provides value_type, rebinding to other value types, share(), ready-future
// factories, then(), and cast<U>(). Specializations for std::future and
// std::shared_future follow below.
template<class Future>
struct future_traits
{
  public:
    using future_type = Future;

    using value_type = typename detail::future_value<future_type>::type;

    // rebind_value<U> names the analogous future type holding a U
    template<class U>
    using rebind_value = future_rebind_value_t<future_type,U>;

  private:
    // share() is implemented with the following priorities:
    // 1. return fut.share(), if this function exists
    // 2. return fut, if future_type is copiable
    // 3. convert fut into a std::future via std::async() and call .share()

    // case 2: a copyable future acts as its own shared future
    __agency_exec_check_disable__
    template<class Future1,
             class = typename std::enable_if<
               std::is_copy_constructible<Future1>::value
             >::type>
    __AGENCY_ANNOTATION
    static future_type share_impl2(Future1& fut)
    {
      return fut;
    }

    // case 3: neither .share() nor copyable -- adapt through std::future
    template<class Future1,
             class = typename std::enable_if<
               !std::is_copy_constructible<Future1>::value
             >::type>
    static std::shared_future<value_type> share_impl2(Future1& fut)
    {
      // turn fut into a std::future
      std::future<value_type> std_fut = std::async(std::launch::deferred, [](Future1&& fut)
      {
        return fut.get();
      },
      std::move(fut));

      // share the std::future
      return std_fut.share();
    }

    // case 1: the future has its own .share()
    __agency_exec_check_disable__
    template<class Future1,
             class = typename std::enable_if<
               detail::has_share<Future1>::value
             >::type>
    __AGENCY_ANNOTATION
    static auto share_impl1(Future1& fut) ->
      decltype(fut.share())
    {
      return fut.share();
    }

    // no .share(): fall through to the copyable / adapter cases
    __agency_exec_check_disable__
    template<class Future1,
             class = typename std::enable_if<
               !detail::has_share<Future1>::value
             >::type>
    __AGENCY_ANNOTATION
    static auto share_impl1(Future1& fut) ->
      decltype(share_impl2(fut))
    {
      return share_impl2(fut);
    }

    __AGENCY_ANNOTATION
    static auto share_impl(future_type& fut) ->
      decltype(share_impl1(fut))
    {
      return share_impl1(fut);
    }

  public:
    // the type share() yields, as selected by the priorities above
    using shared_future_type = decltype(share_impl(*std::declval<future_type*>()));

    __AGENCY_ANNOTATION
    static shared_future_type share(future_type& fut)
    {
      return share_impl(fut);
    }

    // ready-future factories, delegated to the rebound future type
    __AGENCY_ANNOTATION
    static rebind_value<void> make_ready()
    {
      return rebind_value<void>::make_ready();
    }

    template<class T, class... Args>
    __AGENCY_ANNOTATION
    static rebind_value<T> make_ready(Args&&... args)
    {
      return rebind_value<T>::make_ready(std::forward<Args>(args)...);
    }

    template<class T>
    __AGENCY_ANNOTATION
    static rebind_value<typename std::decay<T>::type> make_ready(T&& value)
    {
      return rebind_value<typename std::decay<T>::type>::make_ready(std::forward<T>(value));
    }

    // then() requires the future to provide its own .then()
    template<class Function,
             class = typename std::enable_if<
               detail::has_then<future_type,Function&&>::value
             >::type>
    __AGENCY_ANNOTATION
    static rebind_value<
      agency::detail::result_of_continuation_t<Function&&, future_type>
    >
      then(future_type& fut, Function&& f)
    {
      return fut.then(std::forward<Function>(f));
    }

  private:
    // cast<U>() priorities: the future's own cast, then direct construction,
    // then a move-and-cast continuation

    __agency_exec_check_disable__
    template<class U>
    __AGENCY_ANNOTATION
    static rebind_value<U> cast_impl2(future_type& fut,
                                      typename std::enable_if<
                                        std::is_constructible<rebind_value<U>,future_type&&>::value
                                      >::type* = 0)
    {
      return rebind_value<U>(std::move(fut));
    }

    template<class U>
    __AGENCY_ANNOTATION
    static rebind_value<U> cast_impl2(future_type& fut,
                                      typename std::enable_if<
                                        !std::is_constructible<rebind_value<U>,future_type&&>::value
                                      >::type* = 0)
    {
      return future_traits<future_type>::then(fut, detail::move_and_cast_functor<U>());
    }

    __agency_exec_check_disable__
    template<class U>
    __AGENCY_ANNOTATION
    static rebind_value<U> cast_impl1(future_type& fut,
                                      typename std::enable_if<
                                        detail::has_cast<future_type,U,rebind_value<U>>::value
                                      >::type* = 0)
    {
      return future_type::template cast<U>(fut);
    }

    template<class U>
    __AGENCY_ANNOTATION
    static rebind_value<U> cast_impl1(future_type& fut,
                                      typename std::enable_if<
                                        !detail::has_cast<future_type,U,rebind_value<U>>::value
                                      >::type* = 0)
    {
      return cast_impl2<U>(fut);
    }

  public:
    // converts fut into a future of U
    template<class U>
    __AGENCY_ANNOTATION
    static rebind_value<U> cast(future_type& fut)
    {
      return cast_impl1<U>(fut);
    }
};
// specialization of future_traits for std::future
template<class T>
struct future_traits<std::future<T>>
{
  public:
    using future_type = std::future<T>;

    using value_type = typename detail::future_value<future_type>::type;

    template<class U>
    using rebind_value = std::future<U>;

    using shared_future_type = std::shared_future<T>;

    __agency_exec_check_disable__
    static shared_future_type share(future_type& fut)
    {
      return fut.share();
    }

    __agency_exec_check_disable__
    __AGENCY_ANNOTATION
    static rebind_value<void> make_ready()
    {
      return detail::make_ready_future();
    }

    __agency_exec_check_disable__
    template<class U, class... Args>
    __AGENCY_ANNOTATION
    static rebind_value<U> make_ready(Args&&... args)
    {
      return detail::make_ready_future<U>(std::forward<Args>(args)...);
    }

    // then() here is monadic: the continuation receives the unwrapped value,
    // not the future
    __agency_exec_check_disable__
    template<class Function>
    __AGENCY_ANNOTATION
    static auto then(future_type& fut, Function&& f) ->
      decltype(detail::monadic_then(fut, std::forward<Function>(f)))
    {
      return detail::monadic_then(fut, std::forward<Function>(f));
    }

  private:
    // fall back to a move-and-cast continuation when the target future
    // cannot be constructed directly from this one
    template<class U>
    static rebind_value<U> cast_impl(future_type& fut,
                                     typename std::enable_if<
                                       !std::is_constructible<rebind_value<U>,future_type&&>::value
                                     >::type* = 0)
    {
      return future_traits<future_type>::then(fut, detail::move_and_cast_functor<U>());
    }

    template<class U>
    static rebind_value<U> cast_impl(future_type& fut,
                                     typename std::enable_if<
                                       std::is_constructible<rebind_value<U>,future_type&&>::value
                                     >::type* = 0)
    {
      return rebind_value<U>(std::move(fut));
    }

  public:
    __agency_exec_check_disable__
    template<class U>
    __AGENCY_ANNOTATION
    static rebind_value<U> cast(future_type& fut)
    {
      return cast_impl<U>(fut);
    }
};
// specialization of future_traits for std::shared_future
template<class T>
struct future_traits<std::shared_future<T>>
{
  public:
    using future_type = std::shared_future<T>;

    using value_type = typename detail::future_value<future_type>::type;

    template<class U>
    using rebind_value = std::shared_future<U>;

    using shared_future_type = std::shared_future<T>;

    // a shared_future is already shared: return a copy
    __agency_exec_check_disable__
    static shared_future_type share(future_type& fut)
    {
      return fut;
    }

    __agency_exec_check_disable__
    __AGENCY_ANNOTATION
    static rebind_value<void> make_ready()
    {
      return detail::make_ready_shared_future();
    }

    __agency_exec_check_disable__
    template<class U, class... Args>
    __AGENCY_ANNOTATION
    static rebind_value<U> make_ready(Args&&... args)
    {
      return detail::make_ready_shared_future<U>(std::forward<Args>(args)...);
    }

    // then() here is monadic: the continuation receives the unwrapped value
    __agency_exec_check_disable__
    template<class Function>
    __AGENCY_ANNOTATION
    static auto then(future_type& fut, Function&& f) ->
      decltype(detail::monadic_then(fut, std::forward<Function>(f)))
    {
      return detail::monadic_then(fut, std::forward<Function>(f));
    }

  private:
    template<class U>
    static rebind_value<U> cast_impl(future_type& fut,
                                     typename std::enable_if<
                                       !std::is_constructible<rebind_value<U>,future_type&&>::value
                                     >::type* = 0)
    {
      return future_traits<future_type>::then(fut, detail::move_and_cast_functor<U>());
    }

    template<class U>
    static rebind_value<U> cast_impl(future_type& fut,
                                     typename std::enable_if<
                                       std::is_constructible<rebind_value<U>,future_type&&>::value
                                     >::type* = 0)
    {
      return rebind_value<U>(std::move(fut));
    }

  public:
    __agency_exec_check_disable__
    template<class U>
    __AGENCY_ANNOTATION
    static rebind_value<U> cast(future_type& fut)
    {
      return cast_impl<U>(fut);
    }
};
namespace detail
{
// unwrap_small_tuple: collapses "small" tuples --
// a 0-tuple becomes void, a 1-tuple becomes its sole element,
// larger tuples pass through unchanged.

template<class Tuple, size_t = std::tuple_size<Tuple>::value>
struct unwrap_small_tuple_result
{
  using type = Tuple;
};

template<class Tuple>
struct unwrap_small_tuple_result<Tuple,0>
{
  using type = void;
};

template<class Tuple>
struct unwrap_small_tuple_result<Tuple,1>
{
  using type = typename std::tuple_element<0,Tuple>::type;
};

template<class Tuple>
using unwrap_small_tuple_result_t = typename unwrap_small_tuple_result<Tuple>::type;

// empty tuple: nothing to return
template<class Tuple>
__AGENCY_ANNOTATION
void unwrap_small_tuple(Tuple&&,
                        typename std::enable_if<
                          std::tuple_size<
                            typename std::decay<Tuple>::type
                          >::value == 0
                        >::type* = 0)
{}

// singleton tuple: move out the single element
__agency_exec_check_disable__
template<class Tuple>
__AGENCY_ANNOTATION
unwrap_small_tuple_result_t<typename std::decay<Tuple>::type>
  unwrap_small_tuple(Tuple&& t,
                     typename std::enable_if<
                       std::tuple_size<
                         typename std::decay<Tuple>::type
                       >::value == 1
                     >::type* = 0)
{
  return std::move(agency::get<0>(t));
}

// larger tuple: return it whole (moved)
template<class Tuple>
__AGENCY_ANNOTATION
unwrap_small_tuple_result_t<typename std::decay<Tuple>::type>
  unwrap_small_tuple(Tuple&& t,
                     typename std::enable_if<
                       (std::tuple_size<
                         typename std::decay<Tuple>::type
                       >::value > 1)
                     >::type* = 0)
{
  return std::move(t);
}
// make_tuple_for<type_list<Ts...>> names tuple<Ts...>
template<class TypeList>
struct make_tuple_for_impl;

template<class... Types>
struct make_tuple_for_impl<type_list<Types...>>
{
  using type = tuple<Types...>;
};

template<class TypeList>
using make_tuple_for = typename make_tuple_for_impl<TypeList>::type;

// placeholder stored in result tuples in place of a void future's "value"
struct void_value {};

template<class T>
struct is_not_void_value : std::integral_constant<bool, !std::is_same<T,void_value>::value> {};

// maps void -> void_value, leaves other types alone
template<class T>
struct void_to_void_value : std::conditional<std::is_void<T>::value, void_value, T> {};
// the tuple type able to hold the values of all the given futures,
// with void value types represented by void_value
template<class... Futures>
struct tuple_of_future_values_impl
{
  using value_types = type_list<
    typename future_traits<Futures>::value_type...
  >;

  // map void to void_value
  using mapped_value_types = type_list_map<void_to_void_value, value_types>;

  // create a tuple from the list of types
  using type = make_tuple_for<mapped_value_types>;
};

template<class... Futures>
using tuple_of_future_values = typename tuple_of_future_values_impl<Futures...>::type;
// get_value: uniform interface over void and non-void futures.
// The void overload consumes the future (propagating any stored exception)
// and returns a void_value placeholder so results can be stored in a tuple.
__agency_exec_check_disable__
template<class Future,
         class = typename std::enable_if<
           std::is_void<
             typename future_traits<Future>::value_type
           >::value
         >::type
        >
__AGENCY_ANNOTATION
void_value get_value(Future& fut)
{
  fut.get();

  return void_value{};
}

__agency_exec_check_disable__
template<class Future,
         class = typename std::enable_if<
           !std::is_void<
             typename future_traits<Future>::value_type
           >::value
         >::type
        >
__AGENCY_ANNOTATION
typename future_traits<Future>::value_type
  get_value(Future& fut)
{
  return fut.get();
}

// collects every future's value into a single tuple (voids become void_value)
__agency_exec_check_disable__
template<class... Futures>
__AGENCY_ANNOTATION
tuple_of_future_values<Futures...>
  get_tuple_of_future_values(Futures&... futures)
{
  return tuple_of_future_values<Futures...>(detail::get_value(futures)...);
}
// computes the result type of joining the given futures and selecting the
// values at Indices: gather, drop voids, then unwrap 0-/1-element tuples
template<class IndexSequence, class... Futures>
struct when_all_and_select_result;

template<size_t... Indices, class... Futures>
struct when_all_and_select_result<index_sequence<Indices...>,Futures...>
{
  using type = decltype(
    detail::unwrap_small_tuple(
      detail::tuple_filter<is_not_void_value>(
        detail::tuple_gather<Indices...>(
          detail::get_tuple_of_future_values(
            *std::declval<Futures*>()...
          )
        )
      )
    )
  );
};

template<class IndexSequence, class... Futures>
using when_all_and_select_result_t = typename when_all_and_select_result<IndexSequence,Futures...>::type;

// when_all selects every future, in order
template<class... Futures>
struct when_all_result : when_all_and_select_result<index_sequence_for<Futures...>,Futures...> {};

template<class... Futures>
using when_all_result_t = typename when_all_result<Futures...>::type;
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/algorithm/construct_n.hpp>
#include <agency/detail/algorithm/copy.hpp>
#include <agency/detail/algorithm/copy_n.hpp>
#include <agency/detail/algorithm/destroy.hpp>
#include <agency/detail/algorithm/equal.hpp>
#include <agency/detail/algorithm/max.hpp>
#include <agency/detail/algorithm/min.hpp>
#include <agency/detail/algorithm/overlapped_copy.hpp>
#include <agency/detail/algorithm/overlapped_uninitialized_copy.hpp>
#include <agency/detail/algorithm/uninitialized_copy.hpp>
#include <agency/detail/algorithm/uninitialized_copy_n.hpp>
#include <agency/detail/algorithm/uninitialized_move_n.hpp>
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/utility.hpp>
#include <agency/container/array.hpp>
#include <agency/experimental/optional.hpp>
#include <agency/experimental/bounded_integer.hpp>
#include <cstddef>
#include <utility>
namespace agency
{
namespace experimental
{
namespace detail
{
namespace short_vector_detail
{
// XXX eliminate this once we've integrated this functionality elsewhere within the library
// compile-time-unrolled loop: invokes f(first), f(first+1), ..., f(last-1)
template<size_t first, size_t last>
struct static_for_loop_impl
{
  template<class Function>
  __AGENCY_ANNOTATION
  static void invoke(Function&& f)
  {
    // NOTE(review): f is forwarded for this call and forwarded again into
    // the recursion; for an rvalue Function this is safe only while
    // operator() does not consume *this -- confirm callers pass
    // non-consuming functors
    std::forward<Function>(f)(first);

    static_for_loop_impl<first+1,last>::invoke(std::forward<Function>(f));
  }
};

// terminal case: empty range, nothing to invoke
template<size_t first>
struct static_for_loop_impl<first,first>
{
  template<class Function>
  __AGENCY_ANNOTATION
  static void invoke(Function&&)
  {
  }
};

// entry point: statically unrolled loop over [0, n)
template<size_t n, class Function>
__AGENCY_ANNOTATION
void static_for_loop(Function&& f)
{
  static_for_loop_impl<0,n>::invoke(std::forward<Function>(f));
}
// bounded_index: indexes an array through a statically-unrolled chain of
// comparisons rather than a dynamic subscript (keeps the array in registers
// on targets where dynamic indexing would spill it).
template<size_t first, size_t last>
struct bounded_index_impl
{
  template<class T, size_t N>
  __AGENCY_ANNOTATION
  static T& index(array<T,N>& a, int idx)
  {
    if(first == idx)
    {
      return a[first];
    }

    return bounded_index_impl<first+1,last>::index(a, idx);
  }

  template<class T, size_t N>
  __AGENCY_ANNOTATION
  static const T& index(const array<T,N>& a, int idx)
  {
    if(first == idx)
    {
      return a[first];
    }

    return bounded_index_impl<first+1,last>::index(a, idx);
  }
};

// terminal case: idx was out of range [0, N); returns a[0] so the caller
// always receives a reference to real storage (short_vector::back_or_none
// relies on this when the vector is empty)
template<size_t first>
struct bounded_index_impl<first,first>
{
  template<class T, size_t N>
  __AGENCY_ANNOTATION
  static T& index(array<T,N>& a, int)
  {
    return a[0];
  }

  template<class T, size_t N>
  __AGENCY_ANNOTATION
  static const T& index(const array<T,N>& a, int)
  {
    return a[0];
  }
};

template<class T, size_t N>
__AGENCY_ANNOTATION
T& bounded_index(array<T,N>& a, int idx)
{
  return bounded_index_impl<0,N>::index(a, idx);
}

template<class T, size_t N>
__AGENCY_ANNOTATION
const T& bounded_index(const array<T,N>& a, int idx)
{
  return bounded_index_impl<0,N>::index(a, idx);
}
} // end short_vector_detail
} // end detail
// A fixed-capacity vector: inline storage for up to N elements plus a
// runtime size in [0, N]. size_type is a bounded integer that encodes the
// maximum size in the type.
//
// Fixes in this revision:
//  - the user-declared move constructor implicitly deleted copy assignment,
//    and the user-declared copy constructor suppressed move assignment, so
//    the class had no assignment operators at all; both are now explicitly
//    defaulted
//  - the member swap() (previously commented out) is reinstated, size-aware,
//    so the free swap() below can compile
template<class T, std::size_t N>
class short_vector
{
  private:
    // invokes f(i) for each i in [0, size()); when the vector is full the
    // per-iteration bound check is skipped so the loop fully unrolls
    template<class Function>
    __AGENCY_ANNOTATION
    void for_loop(Function&& f)
    {
      if(N == size())
      {
        detail::short_vector_detail::static_for_loop<N>(std::forward<Function>(f));
      }
      else
      {
        detail::short_vector_detail::static_for_loop<N>([&](std::size_t i)
        {
          if(i < size())
          {
            std::forward<Function>(f)(i);
          }
        });
      }
    }

  public:
    static const std::size_t static_max_size = N;

    using value_type = T;

    // encode the maximum size of this short_vector in its size_type
    using size_type = bounded_size_t<static_max_size>;

    using difference_type = std::ptrdiff_t;
    using reference = value_type&;
    using const_reference = const value_type&;
    using pointer = value_type*;
    using const_pointer = const value_type*;
    using iterator = pointer;
    using const_iterator = const_pointer;

    __AGENCY_ANNOTATION
    short_vector()
      : size_(0)
    {
    }

    __AGENCY_ANNOTATION
    short_vector(const short_vector&) = default;

    __AGENCY_ANNOTATION
    short_vector(short_vector&&) = default;

    // explicitly defaulted: the user-declared copy & move constructors above
    // would otherwise suppress both implicit assignment operators
    __AGENCY_ANNOTATION
    short_vector& operator=(const short_vector&) = default;

    __AGENCY_ANNOTATION
    short_vector& operator=(short_vector&&) = default;

    // constructs from any range providing size() and operator[];
    // other.size() must not exceed N
    template<class Range>
    __AGENCY_ANNOTATION
    short_vector(Range&& other)
      : size_(other.size())
    {
      // copy construct each element with placement new
      // NOTE(review): array_'s elements are already default-constructed, so
      // for non-trivial T this placement new overwrites live objects without
      // destroying them first -- confirm T is trivial or rework with raw
      // storage
      for_loop([&](int i)
      {
        T& x = (*this)[i];
        ::new(&x) T(other[i]);
      });
    }

    __AGENCY_ANNOTATION
    reference operator[](size_type pos)
    {
      return array_[pos];
    }

    __AGENCY_ANNOTATION
    const_reference operator[](size_type pos) const
    {
      return array_[pos];
    }

    __AGENCY_ANNOTATION
    reference front()
    {
      return *begin();
    }

    __AGENCY_ANNOTATION
    const_reference front() const
    {
      return *begin();
    }

    // back() intentionally yields a reference to array_[0] when the vector
    // is empty (see bounded_index); back_or_none() depends on this
    __AGENCY_ANNOTATION
    reference back()
    {
#ifdef __CUDA_ARCH__
      return detail::short_vector_detail::bounded_index(array_, size() - 1);
#else
      return array_[size()-1];
#endif
    }

    __AGENCY_ANNOTATION
    const_reference back() const
    {
#ifdef __CUDA_ARCH__
      return detail::short_vector_detail::bounded_index(array_, size() - 1);
#else
      return array_[size()-1];
#endif
    }

    __AGENCY_ANNOTATION
    optional<value_type> back_or_none() const
    {
      //return empty() ? nullopt : make_optional(back());

      // XXX this requires fewer registers than the equivalent above
      //     but depends on the knowledge that the implementation of back()
      //     returns a reference to an actual memory location even when size() == 0
      auto result = make_optional(back());
      if(empty()) result = nullopt;
      return result;
    }

    __AGENCY_ANNOTATION
    T* data()
    {
      return array_.data();
    }

    __AGENCY_ANNOTATION
    const T* data() const
    {
      return array_.data();
    }

    __AGENCY_ANNOTATION
    iterator begin()
    {
      return data();
    }

    __AGENCY_ANNOTATION
    const_iterator begin() const
    {
      return data();
    }

    __AGENCY_ANNOTATION
    const_iterator cbegin()
    {
      return begin();
    }

    __AGENCY_ANNOTATION
    const_iterator cbegin() const
    {
      return begin();
    }

    __AGENCY_ANNOTATION
    iterator end()
    {
      return data() + size();
    }

    __AGENCY_ANNOTATION
    const_iterator end() const
    {
      return data() + size();
    }

    __AGENCY_ANNOTATION
    const_iterator cend()
    {
      return end();
    }

    __AGENCY_ANNOTATION
    const_iterator cend() const
    {
      return end();
    }

    __AGENCY_ANNOTATION
    constexpr bool empty() const
    {
      return size() == size_type(0);
    }

    __AGENCY_ANNOTATION
    constexpr size_type size() const
    {
      return size_;
    }

    __AGENCY_ANNOTATION
    constexpr size_type max_size() const
    {
      return N;
    }

    // assigns value to every element currently in the vector
    __AGENCY_ANNOTATION
    void fill(const T& value)
    {
      for(auto& e : *this)
      {
        e = value;
      }
    }

    // exchanges both the elements and the sizes of *this and other;
    // swapping the whole inline array keeps the operation correct even
    // when the two vectors have different sizes
    __AGENCY_ANNOTATION
    void swap(short_vector& other)
    {
      agency::detail::adl_swap(array_, other.array_);
      agency::detail::adl_swap(size_, other.size_);
    }

  private:
    array<value_type, N> array_;
    size_type size_;
};
// Elementwise equality: true when both vectors have the same size and
// equal elements in the same order.
template<class T, std::size_t N>
__AGENCY_ANNOTATION
bool operator==(const short_vector<T,N>& lhs, const short_vector<T,N>& rhs)
{
  if(lhs.size() != rhs.size())
  {
    return false;
  }

  bool all_equal = true;
  for(std::size_t i = 0; all_equal && i < lhs.size(); ++i)
  {
    all_equal = (lhs[i] == rhs[i]);
  }

  return all_equal;
}
// XXX other relational operators here
// XXX get() here?
// XXX tuple specializations here?
// Free swap for short_vector, delegating to the member swap.
// NOTE(review): this requires short_vector to provide a member
// swap(short_vector&); verify one is available -- the in-class definition
// is commented out in this revision, in which case instantiating this
// overload will not compile.
template<class T, std::size_t N>
__AGENCY_ANNOTATION
void swap(short_vector<T,N>& a, short_vector<T,N>& b)
{
  a.swap(b);
}
} // end experimental
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/experimental/ranges/range_traits.hpp>
#include <agency/experimental/ranges/all.hpp>
#include <agency/experimental/short_vector.hpp>
#include <type_traits>
#include <utility>
#include <cassert>
namespace agency
{
namespace experimental
{
template<class RangeOfRanges, size_t max_tile_count_ = 8>
class small_untiled_view
{
private:
using inner_range_type = range_reference_t<RangeOfRanges>;
// the type of types we store is the all() type of the inner range of the RangeOfRanges
using tile_type = all_t<inner_range_type>;
template<class,size_t> friend class small_untiled_view;
public:
// XXX this should probably be size_type (which could be narrow, or signed)
// rather than size_t
static constexpr size_t max_tile_count = max_tile_count_;
using difference_type = range_difference_t<tile_type>;
using size_type = range_size_t<tile_type>;
using value_type = range_value_t<tile_type>;
using reference = range_reference_t<tile_type>;
__AGENCY_ANNOTATION
small_untiled_view() = default;
__AGENCY_ANNOTATION
small_untiled_view(const small_untiled_view&) = default;
template<class OtherRangeOfRanges,
__AGENCY_REQUIRES(
std::is_convertible<
all_t<range_reference_t<OtherRangeOfRanges>>,
tile_type
>::value
)>
__AGENCY_ANNOTATION
small_untiled_view(const small_untiled_view<OtherRangeOfRanges>& other)
: small_untiled_view(other.tile_size_, other.tiles())
{}
template<class OtherRangeOfRanges,
__AGENCY_REQUIRES(
std::is_convertible<
all_t<range_reference_t<OtherRangeOfRanges>>,
tile_type
>::value
)>
__AGENCY_ANNOTATION
small_untiled_view(size_t tile_size, OtherRangeOfRanges&& tiles)
: tile_size_(tile_size),
tiles_(std::forward<OtherRangeOfRanges>(tiles)) // XXX we actually need to call all() on each element of tiles for this to be correct
// it happens to work now because inner_range_type is convertible to tile_type for the ranges we're currently working with
{
}
__AGENCY_ANNOTATION
reference bracket_operator(std::integral_constant<size_t,max_tile_count-1>, size_t i) const
{
return tiles_[max_tile_count-1][i];
}
template<size_t tile_idx>
__AGENCY_ANNOTATION
reference bracket_operator(std::integral_constant<size_t,tile_idx>, size_t i) const
{
return i < tile_size_ ? tiles_[tile_idx][i] : bracket_operator(std::integral_constant<size_t,tile_idx+1>(), i - tile_size_);
}
// returns the i-th element of the flattened (untiled) sequence
__AGENCY_ANNOTATION
reference operator[](size_t i) const
{
  // the performance of operator[] depends on the tiles_ array being statically indexed
  return bracket_operator(std::integral_constant<size_t,0>(), i);
}
// Returns the total number of elements viewed: every tile except the last
// holds exactly tile_size_ elements; the final tile may be partially full.
__AGENCY_ANNOTATION
size_t size() const
{
  return tiles_.empty() ? 0 : tile_size_ * (tiles_.size() - 1) + tiles_.back().size();
}
// this iterator type is trivial:
// it just copies the view it came from (self_) and tracks its current position
// XXX might want to refactor this into detail::view_iterator or something because
// it is repeated inside of flattened_view
// A random access iterator over the elements of a small_untiled_view.
// It stores a copy of the view it came from (self_) plus its current linear
// position; all element access delegates to the view's operator[].
// Fix: operator+, the binary operator-(size_type), and operator[] are now
// const -- the RandomAccessIterator requirements permit `it + n`, `it - n`,
// and `it[n]` on const iterators, and none of these operations mutates *this.
class iterator
{
  public:
    using value_type = typename small_untiled_view::value_type;
    using reference = typename small_untiled_view::reference;
    using difference_type = typename small_untiled_view::difference_type;
    using pointer = value_type*;
    using iterator_category = std::random_access_iterator_tag;

    // dereference: return the element at the current position of the view
    __AGENCY_ANNOTATION
    reference operator*() const
    {
      return self_[current_position_];
    }

    // pre-increment
    // NOTE(review): returns a copy rather than iterator&; callers relying on
    // the conventional reference return would need a change elsewhere
    __AGENCY_ANNOTATION
    iterator operator++()
    {
      ++current_position_;
      return *this;
    }

    // pre-decrement
    __AGENCY_ANNOTATION
    iterator operator--()
    {
      --current_position_;
      return *this;
    }

    // post-increment: returns the value before the increment
    __AGENCY_ANNOTATION
    iterator operator++(int)
    {
      iterator result = *this;
      current_position_++;
      return result;
    }

    // post-decrement: returns the value before the decrement
    __AGENCY_ANNOTATION
    iterator operator--(int)
    {
      iterator result = *this;
      current_position_--;
      return result;
    }

    // add-assign: advance by n positions
    __AGENCY_ANNOTATION
    iterator operator+=(size_type n)
    {
      current_position_ += n;
      return *this;
    }

    // minus-assign: retreat by n positions
    __AGENCY_ANNOTATION
    iterator operator-=(size_type n)
    {
      current_position_ -= n;
      return *this;
    }

    // add: returns an iterator advanced by n; does not modify *this
    __AGENCY_ANNOTATION
    iterator operator+(size_type n) const
    {
      iterator result = *this;
      result += n;
      return result;
    }

    // minus: returns an iterator retreated by n; does not modify *this
    __AGENCY_ANNOTATION
    iterator operator-(size_type n) const
    {
      iterator result = *this;
      result -= n;
      return result;
    }

    // bracket: element n positions away from the current one
    __AGENCY_ANNOTATION
    reference operator[](size_type n) const
    {
      iterator tmp = *this + n;
      return *tmp;
    }

    // equal
    __AGENCY_ANNOTATION
    bool operator==(const iterator& rhs) const
    {
      // we assume that *this and rhs came from the same flattened_view,
      // so we do not compare their self_ members
      return current_position_ == rhs.current_position_;
    }

    // not equal
    __AGENCY_ANNOTATION
    bool operator!=(const iterator& rhs) const
    {
      return !(*this == rhs);
    }

    // difference: distance between two iterators from the same view
    __AGENCY_ANNOTATION
    difference_type operator-(const iterator& rhs) const
    {
      return current_position_ - rhs.current_position_;
    }

  private:
    friend small_untiled_view;

    // only small_untiled_view may construct iterators (via begin()/end())
    __AGENCY_ANNOTATION
    iterator(size_type current_position, const small_untiled_view& self)
      : current_position_(current_position),
        self_(self)
    {}

    // XXX a more efficient implementation would track the current tile
    // XXX and the current position within the tile
    //     could keep an iterator to the current tile
    //     would make operator- and operator+= less efficient because they would involve linear searches
    size_type current_position_;
    small_untiled_view self_;
};
// returns an iterator to the first element of the flattened sequence
__AGENCY_ANNOTATION
iterator begin() const
{
  return iterator(0, *this);
}
// returns an iterator one past the last element of the flattened sequence
__AGENCY_ANNOTATION
iterator end() const
{
  return iterator(size(), *this);
}
private:
size_t tile_size_;
mutable short_vector<tile_type,max_tile_count> tiles_;
public:
// returns a view of the underlying collection of tiles
__AGENCY_ANNOTATION
auto tiles() const ->
  decltype(all(this->tiles_))
{
  return all(this->tiles_);
}
};
// creates a small_untiled_view with an explicit upper bound on the number of
// tiles; tile_size is the number of elements per (non-final) tile
template<size_t max_num_tiles, class RangeOfRanges>
__AGENCY_ANNOTATION
small_untiled_view<RangeOfRanges, max_num_tiles> untile(size_t tile_size, RangeOfRanges&& tiles)
{
  return small_untiled_view<RangeOfRanges,max_num_tiles>(tile_size, std::forward<RangeOfRanges>(tiles));
}
// creates a small_untiled_view using the default maximum tile count
template<class RangeOfRanges>
__AGENCY_ANNOTATION
small_untiled_view<RangeOfRanges> untile(size_t tile_size, RangeOfRanges&& tiles)
{
  return small_untiled_view<RangeOfRanges>(tile_size, std::forward<RangeOfRanges>(tiles));
}
} // end experimental
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/detail/type_traits.hpp>
#include <agency/future/future_traits.hpp>
#include <agency/experimental/variant.hpp>
#include <agency/future.hpp>
#include <type_traits>
namespace agency
{
// variant_future is a sum type over a closed set of Future types which must
// all produce the same value type. It forwards the common Future interface
// (valid(), wait(), get(), then()) to whichever alternative it currently
// holds, via visitation of the underlying variant.
// Fix: corrected the typo "parmeter" in the first static_assert diagnostic.
template<class Future, class... Futures>
class variant_future
{
  private:
    using variant_type = agency::experimental::variant<Future, Futures...>;

  public:
    // the value type shared by every alternative Future
    using value_type = future_value_t<Future>;

    // a variant_future whose alternatives have had their value types rebound to T
    template<class T>
    using rebind_value = variant_future<
      future_rebind_value_t<Future,T>,
      future_rebind_value_t<Futures,T>...
    >;

    static_assert(detail::conjunction<is_future<Future>, is_future<Futures>...>::value, "All of variant_future's template parameter types must be Futures.");
    static_assert(detail::conjunction<std::is_same<value_type, future_value_t<Futures>>...>::value, "All Futures' value types must be the same.");

    // constructs an invalid variant_future
    __AGENCY_ANNOTATION
    variant_future() = default;

    // move-constructs; the moved-from future is invalidated
    __AGENCY_ANNOTATION
    variant_future(variant_future&&) = default;

    // constructs from any future which can initialize the underlying variant
    template<class OtherFuture,
             __AGENCY_REQUIRES(
               std::is_constructible<variant_type,OtherFuture&&>::value
             )>
    __AGENCY_ANNOTATION
    variant_future(OtherFuture&& other)
      : variant_(std::forward<OtherFuture>(other))
    {}

    __AGENCY_ANNOTATION
    variant_future& operator=(variant_future&& other) = default;

    // this is the overload of make_ready() for non-void value_type
    template<class T,
             __AGENCY_REQUIRES(
               std::is_constructible<value_type,T&&>::value
             )>
    static variant_future make_ready(T&& value)
    {
      // use the first Future type to create the ready state
      return future_traits<Future>::make_ready(std::forward<T>(value));
    }

    // this is the overload of make_ready() for void value_type
    template<bool deduced = true,
             __AGENCY_REQUIRES(
               deduced && std::is_void<value_type>::value
             )>
    static variant_future make_ready()
    {
      // use the first Future type to create the ready state
      return future_traits<Future>::make_ready();
    }

    /// Returns the index of the alternative Future currently held.
    /// XXX consider eliminating this member and instead deriving variant_future from variant
    __AGENCY_ANNOTATION
    size_t index() const
    {
      return variant_.index();
    }

    /// Returns this variant_future's underlying variant object and invalidates this variant_future.
    /// XXX consider eliminating this member and instead deriving variant_future from variant
    __AGENCY_ANNOTATION
    variant_type variant()
    {
      return std::move(variant_);
    }

  private:
    // visitor which forwards .valid() to the held alternative
    struct valid_visitor
    {
      template<class T>
      __AGENCY_ANNOTATION
      bool operator()(const T& f) const
      {
        return f.valid();
      }
    };

  public:
    // returns whether this future refers to a shared state
    __AGENCY_ANNOTATION
    bool valid() const
    {
      auto visitor = valid_visitor();
      return agency::experimental::visit(visitor, variant_);
    }

  private:
    // visitor which forwards .wait() to the held alternative
    struct wait_visitor
    {
      __agency_exec_check_disable__
      template<class T>
      __AGENCY_ANNOTATION
      void operator()(T& f) const
      {
        f.wait();
      }
    };

  public:
    // blocks until the held future becomes ready
    __AGENCY_ANNOTATION
    void wait()
    {
      auto visitor = wait_visitor();
      return agency::experimental::visit(visitor, variant_);
    }

  private:
    // visitor which forwards .get() to the held alternative
    struct get_visitor
    {
      __agency_exec_check_disable__
      template<class T>
      __AGENCY_ANNOTATION
      value_type operator()(T& f) const
      {
        return f.get();
      }
    };

  public:
    // waits for and returns the held future's value, invalidating this future
    __AGENCY_ANNOTATION
    value_type get()
    {
      auto visitor = get_visitor();
      return agency::experimental::visit(visitor, variant_);
    }

  private:
    // visitor which attaches a continuation to the held alternative
    template<class FunctionRef>
    struct then_visitor
    {
      FunctionRef f;

      template<class T>
      __AGENCY_ANNOTATION
      future_then_result_t<variant_future, detail::decay_t<FunctionRef>>
      operator()(T& future) const
      {
        // XXX should probably do this through a Future customization point
        return agency::future_traits<T>::then(future, std::forward<FunctionRef>(f));
      }
    };

  public:
    // attaches continuation f, which receives this future's result when ready
    template<class Function>
    future_then_result_t<variant_future, Function>
    then(Function&& f)
    {
      auto visitor = then_visitor<Function&&>{std::forward<Function>(f)};
      return agency::experimental::visit(visitor, variant_);
    }

  private:
    variant_type variant_;
};
namespace detail
{
// trait: detects whether T is a specialization of variant_future
template<class T>
struct is_variant_future : std::false_type {};

template<class Future, class... Futures>
struct is_variant_future<variant_future<Future,Futures...>> : std::true_type {};
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/cuda/execution/detail/kernel/launch_kernel.hpp>
#include <agency/cuda/execution/detail/kernel/on_chip_shared_parameter.hpp>
#include <agency/cuda/device.hpp>
#include <agency/cuda/detail/future/async_future.hpp>
#include <memory>
namespace agency
{
namespace cuda
{
namespace detail
{
// XXX consider moving the stuff related to bulk_then_execute_closure into its own header
// as bulk_then_execution_concurrent_grid.hpp also depends on it
// A nullary closure executed by each CUDA thread of a bulk_then_execute
// kernel launch. It dereferences the pointers to the predecessor's value,
// the result, and the outer shared parameter, constructs the inner (on-chip)
// shared parameter, and invokes the user function f_ with raw references.
template<size_t block_dimension, class Function, class PredecessorPointer, class ResultPointer, class OuterParameterPointer, class InnerFactory>
struct bulk_then_execute_closure
{
  Function f_;
  PredecessorPointer predecessor_ptr_;
  ResultPointer result_ptr_;
  OuterParameterPointer outer_parameter_ptr_;
  InnerFactory inner_factory_;

  // this is the implementation for non-void predecessor
  template<class F, class T1, class T2, class T3, class T4>
  __device__ static inline void impl(F f, T1& predecessor, T2& result, T3& outer_param, T4& inner_param)
  {
    f(predecessor, result, outer_param, inner_param);
  }

  // this is the implementation for void predecessor
  // (a void predecessor is represented by agency::detail::unit and is not passed to f)
  template<class F, class T2, class T3, class T4>
  __device__ static inline void impl(F f, agency::detail::unit, T2& result, T3& outer_param, T4& inner_param)
  {
    f(result, outer_param, inner_param);
  }

  // 1D blocks: the first thread is simply threadIdx.x == 0
  template<size_t dimension = block_dimension, __AGENCY_REQUIRES(dimension == 1)>
  __device__ static inline bool is_first_thread_of_block()
  {
    return threadIdx.x == 0;
  }

  // multidimensional blocks
  template<size_t dimension = block_dimension, __AGENCY_REQUIRES(dimension > 1)>
  __device__ static inline bool is_first_thread_of_block()
  {
    agency::int3 idx{threadIdx.x, threadIdx.y, threadIdx.z};

    // XXX this is actually the correct comparison
    //     but this comparison explodes the kernel resource requirements of programs like
    //     testing/unorganized/transpose which launch multidimensional thread blocks.
    //     Those large resource requirements lead to significantly degraded performance.
    //     We should investigate ways to mitigate those requirements and use the correct
    //     comparison. One idea is to teach on_chip_shared_parameter to avoid calling
    //     trivial constructors and destructors.
    //return idx == agency::int3{0,0,0};

    // XXX note that this comparison always fails -- it is incorrect
    return false;
  }

  // the kernel body executed by each thread
  __device__ inline void operator()()
  {
    // we need to cast each dereference below to convert proxy references to ensure that f() only sees raw references
    // XXX isn't there a more elegant way to deal with this?
    using predecessor_reference = typename std::pointer_traits<PredecessorPointer>::element_type &;
    using result_reference = typename std::pointer_traits<ResultPointer>::element_type &;
    using outer_param_reference = typename std::pointer_traits<OuterParameterPointer>::element_type &;

    on_chip_shared_parameter<InnerFactory> inner_parameter(is_first_thread_of_block(), inner_factory_);

    impl(
      f_,
      static_cast<predecessor_reference>(*predecessor_ptr_),
      static_cast<result_reference>(*result_ptr_),
      static_cast<outer_param_reference>(*outer_parameter_ptr_),
      inner_parameter.get()
    );
  }
};
// factory function which deduces the closure's template parameters from its arguments
template<size_t block_dimension, class Function, class PredecessorPointer, class ResultPointer, class OuterParameterPointer, class InnerFactory>
__host__ __device__
bulk_then_execute_closure<block_dimension,Function,PredecessorPointer,ResultPointer,OuterParameterPointer,InnerFactory>
  make_bulk_then_execute_closure(Function f, PredecessorPointer predecessor_ptr, ResultPointer result_ptr, OuterParameterPointer outer_parameter_ptr, InnerFactory inner_factory)
{
  return bulk_then_execute_closure<block_dimension,Function,PredecessorPointer,ResultPointer,OuterParameterPointer,InnerFactory>{f, predecessor_ptr, result_ptr, outer_parameter_ptr, inner_factory};
}
// metafunction which computes the type of (and a pointer to) the cuda_kernel
// instantiation launched for a given bulk_then_execute closure configuration
template<size_t block_dimension, class Function, class T, class ResultFactory, class OuterFactory, class InnerFactory>
struct bulk_then_execute_kernel
{
  using result_type = agency::detail::result_of_t<ResultFactory()>;
  using outer_arg_type = agency::detail::result_of_t<OuterFactory()>;

  // the pointer types produced by the asynchronous states involved in the launch
  using predecessor_pointer_type = decltype(std::declval<agency::detail::asynchronous_state<T>&>().data());
  using result_pointer_type = decltype(std::declval<agency::detail::asynchronous_state<result_type>&>().data());
  using outer_parameter_pointer_type = decltype(std::declval<agency::detail::asynchronous_state<outer_arg_type>&>().data());

  using closure_type = bulk_then_execute_closure<block_dimension, Function, predecessor_pointer_type, result_pointer_type, outer_parameter_pointer_type, InnerFactory>;

  // value is a pointer to the global kernel function instantiated for closure_type
  using type = decltype(&cuda_kernel<closure_type>);
  constexpr static const type value = &cuda_kernel<closure_type>;
};

// convenience alias for the kernel pointer type above
template<size_t block_dimension, class Function, class T, class ResultFactory, class OuterFactory, class InnerFactory>
using bulk_then_execute_kernel_t = typename bulk_then_execute_kernel<block_dimension,Function,T,ResultFactory,OuterFactory,InnerFactory>::type;
// this helper function returns a pointer to the kernel launched within launch_bulk_then_execute_kernel_impl()
// this helper function returns a pointer to the kernel launched within launch_bulk_then_execute_kernel_impl()
// the arguments are unnamed because only their types matter for the deduction
template<size_t block_dimension, class Function, class T, class ResultFactory, class OuterFactory, class InnerFactory>
__host__ __device__
bulk_then_execute_kernel_t<block_dimension,Function,T,ResultFactory,OuterFactory,InnerFactory> make_bulk_then_execute_kernel(const Function& f, const asynchronous_state<T>&, const ResultFactory&, const OuterFactory&, const InnerFactory&)
{
  return bulk_then_execute_kernel<block_dimension,Function,T,ResultFactory,OuterFactory,InnerFactory>::value;
}
// Converts an agency Shape into the CUDA runtime's ::dim3 type by casting it
// to a uint3 and then copying each component.
template<class Shape>
__host__ __device__
::dim3 make_dim3(const Shape& shape)
{
  agency::uint3 as_uint3 = agency::detail::shape_cast<uint3>(shape);
  return ::dim3(as_uint3.x, as_uint3.y, as_uint3.z);
}
// this is the main implementation of the other two launch_bulk_then_execute_kernel() functions
// this is the main implementation of the other two launch_bulk_then_execute_kernel() functions
// it allocates the result & outer-shared states, launches the kernel on the
// given stream, and packages the resulting event and result state into an
// async_future; the statement order below is significant
template<class Function, class Shape, class T, class ResultFactory, class OuterFactory, class InnerFactory>
__host__ __device__
async_future<agency::detail::result_of_t<ResultFactory()>>
  launch_bulk_then_execute_kernel_impl(device_id device, detail::stream&& stream, Function f, ::dim3 grid_dim, Shape block_dim, const asynchronous_state<T>& predecessor_state, ResultFactory result_factory, OuterFactory outer_factory, InnerFactory inner_factory)
{
  // create the asynchronous state to store the result
  using result_type = agency::detail::result_of_t<ResultFactory()>;
  detail::asynchronous_state<result_type> result_state = detail::make_asynchronous_state(result_factory);

  // create the asynchronous state to store the outer shared argument
  using outer_arg_type = agency::detail::result_of_t<OuterFactory()>;
  detail::asynchronous_state<outer_arg_type> outer_arg_state = detail::make_asynchronous_state(outer_factory);

  // wrap up f and its arguments into a closure to execute in a kernel
  const size_t block_dimension = agency::detail::shape_size<Shape>::value;
  auto closure = make_bulk_then_execute_closure<block_dimension>(f, predecessor_state.data(), result_state.data(), outer_arg_state.data(), inner_factory);

  // make the kernel to launch
  auto kernel = make_cuda_kernel(closure);

  // launch the kernel
  detail::try_launch_kernel_on_device(kernel, grid_dim, detail::make_dim3(block_dim), 0, stream.native_handle(), device.native_handle(), closure);

  // create the next event
  detail::event next_event(std::move(stream));

  // schedule the outer arg's state for destruction when the next event is complete
  detail::invalidate_and_destroy_when(outer_arg_state, next_event);

  // return a new async_future corresponding to the next event & result state
  return make_async_future(std::move(next_event), std::move(result_state));
}
// launches the bulk_then_execute kernel while leaving the predecessor future valid
template<class Function, class Shape, class T, class ResultFactory, class OuterFactory, class InnerFactory>
__host__ __device__
async_future<agency::detail::result_of_t<ResultFactory()>>
  launch_bulk_then_execute_kernel(device_id device, Function f, ::dim3 grid_dim, Shape block_dim, async_future<T>& predecessor, ResultFactory result_factory, OuterFactory outer_factory, InnerFactory inner_factory)
{
  // since we're going to leave the predecessor future valid, we make a new dependent stream before calling launch_bulk_then_execute_kernel_impl()
  return detail::launch_bulk_then_execute_kernel_impl(device, detail::async_future_event(predecessor).make_dependent_stream(device), f, grid_dim, block_dim, detail::async_future_state(predecessor), result_factory, outer_factory, inner_factory);
}
// launches the bulk_then_execute kernel and consumes the predecessor future,
// scheduling its state for destruction once the new kernel's event completes
template<class Function, class Shape, class T, class ResultFactory, class OuterFactory, class InnerFactory>
__host__ __device__
async_future<agency::detail::result_of_t<ResultFactory()>>
  launch_bulk_then_execute_kernel_and_invalidate_predecessor(device_id device, Function f, ::dim3 grid_dim, Shape block_dim, async_future<T>& predecessor, ResultFactory result_factory, OuterFactory outer_factory, InnerFactory inner_factory)
{
  // invalidate the future by splitting it into its event and state
  detail::event predecessor_event;
  detail::asynchronous_state<T> predecessor_state;
  agency::tie(predecessor_event, predecessor_state) = detail::invalidate_async_future(predecessor);

  // launch the kernel
  auto result = detail::launch_bulk_then_execute_kernel_impl(device, predecessor_event.make_dependent_stream_and_invalidate(device), f, grid_dim, block_dim, predecessor_state, result_factory, outer_factory, inner_factory);

  // schedule the predecessor's state for destruction when the result future's event is complete
  detail::invalidate_and_destroy_when(predecessor_state, detail::async_future_event(result));

  return result;
}
// queries the CUDA runtime for the maximum number of threads per block with
// which the bulk_then_execute kernel for this configuration may be launched
template<size_t block_dimension, class Function, class T, class ResultFactory, class OuterFactory, class InnerFactory>
__host__ __device__
int max_block_size_of_bulk_then_execute_kernel(const device_id& device, const Function& f, const async_future<T>& predecessor, const ResultFactory& result_factory, const OuterFactory& outer_factory, const InnerFactory& inner_factory)
{
  // temporarily switch the CUDA runtime's current device to the given device
  detail::scoped_current_device scope(device);

  // get a pointer to the kernel launched by launch_bulk_then_execute_kernel()
  constexpr auto kernel = bulk_then_execute_kernel<block_dimension,Function,T,ResultFactory,OuterFactory,InnerFactory>::value;

  // get the kernel's attributes
  cudaFuncAttributes attr;
  detail::throw_on_error(cudaFuncGetAttributes(&attr, kernel), "cuda::detail::max_block_size_of_bulk_then_execute_grid(): CUDA error after cudaFuncGetAttributes()");

  // return the attribute of interest
  return attr.maxThreadsPerBlock;
}
} // end detail
} // end cuda
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/execution/executor/detail/utility/bulk_then_execute_with_void_result.hpp>
#include <agency/execution/executor/detail/utility/bulk_then_execute_with_collected_result.hpp>
#include <agency/execution/executor/detail/utility/executor_bulk_result.hpp>
#include <agency/detail/factory.hpp>
namespace agency
{
namespace detail
{
// this is the case for when Function returns void
// this is the case for when Function returns void: no results need collecting,
// so lower directly onto bulk_then_execute_with_void_result()
__agency_exec_check_disable__
template<class E, class Function, class Future, class... Factories,
         __AGENCY_REQUIRES(BulkExecutor<E>()),
         __AGENCY_REQUIRES(executor_execution_depth<E>::value == sizeof...(Factories)),
         __AGENCY_REQUIRES(std::is_void<result_of_continuation_t<Function, executor_index_t<E>, Future, result_of_t<Factories()>&...>>::value)
        >
__AGENCY_ANNOTATION
executor_future_t<E,void>
  bulk_then_execute_with_auto_result(E& exec, Function f, executor_shape_t<E> shape, Future& predecessor, Factories... factories)
{
  return bulk_then_execute_with_void_result(exec, f, shape, predecessor, factories...);
}
// this is the case for when Function returns non-void
// when Function does not return void, this function collects
// the results of each invocation into a container
// this container is returned through a future
// this is the case for when Function returns non-void: the results of each
// invocation are collected into a container which is returned through a future
template<class E, class Function, class Future, class... Factories,
         __AGENCY_REQUIRES(BulkExecutor<E>()),
         __AGENCY_REQUIRES(executor_execution_depth<E>::value == sizeof...(Factories)),
         __AGENCY_REQUIRES(!std::is_void<result_of_continuation_t<Function, executor_index_t<E>, Future, result_of_t<Factories()>&...>>::value)
        >
__AGENCY_ANNOTATION
executor_future_t<E,
  executor_bulk_result_t<E,
    result_of_continuation_t<Function,executor_index_t<E>,Future,result_of_t<Factories()>&...>
  >
>
  bulk_then_execute_with_auto_result(E& exec, Function f, executor_shape_t<E> shape, Future& predecessor, Factories... factories)
{
  // compute the type of f's result
  using result_type = result_of_continuation_t<Function,executor_index_t<E>,Future,result_of_t<Factories()>&...>;

  // compute the type of container that will store f's results
  using container_type = executor_bulk_result_t<E,result_type>;

  // create a factory that will construct this type of container for us
  auto result_factory = detail::make_construct<container_type>(shape);

  // lower onto bulk_sync_execute_with_collected_result() with this result_factory
  return detail::bulk_then_execute_with_collected_result(exec, f, shape, predecessor, result_factory, factories...);
}
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/execution/executor/executor_traits/is_bulk_executor.hpp>
#include <agency/execution/executor/executor_traits/executor_shape.hpp>
#include <agency/execution/executor/executor_traits/detail/member_index_type_or.hpp>
#include <cstddef>
namespace agency
{
namespace detail
{
// SFINAE-friendly implementation of executor_index: the primary template has
// no nested ::type when BulkExecutor is not actually a bulk executor
template<class BulkExecutor, bool Enable = is_bulk_executor<BulkExecutor>::value>
struct executor_index_impl
{
};

// when BulkExecutor is a bulk executor, its index type is its member
// ::index_type if present, and otherwise its shape type
template<class BulkExecutor>
struct executor_index_impl<BulkExecutor,true>
{
  using type = member_index_type_or_t<BulkExecutor,executor_shape_t<BulkExecutor>>;
};
} // end detail
// trait which names the index type used by a bulk executor's agents
template<class BulkExecutor>
struct executor_index : detail::executor_index_impl<BulkExecutor> {};

// convenience alias for executor_index<BulkExecutor>::type
template<class BulkExecutor>
using executor_index_t = typename executor_index<BulkExecutor>::type;
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/execution/executor/executor_traits.hpp>
#include <agency/execution/executor/customization_points/bulk_sync_execute.hpp>
#include <agency/detail/invoke.hpp>
namespace agency
{
namespace detail
{
// this adaptor turns an Executor into a BulkSynchronousExecutor
// XXX eliminate this when Agency drops support for legacy executors
// this adaptor turns an Executor into a BulkSynchronousExecutor
// XXX eliminate this when Agency drops support for legacy executors
template<class E, bool Enable = BulkExecutor<E>()>
class bulk_synchronous_executor_adaptor;

// the adaptor is only defined for types which satisfy BulkExecutor
template<class BulkExecutor>
class bulk_synchronous_executor_adaptor<BulkExecutor,true>
{
  private:
    BulkExecutor adapted_executor_;

  public:
    // forward the adapted executor's traits, defaulting the execution
    // category to unsequenced when the executor does not declare one
    using execution_category = member_execution_category_or_t<BulkExecutor, unsequenced_execution_tag>;
    using shape_type = executor_shape_t<BulkExecutor>;
    using index_type = executor_index_t<BulkExecutor>;

    template<class T>
    using allocator = executor_allocator_t<BulkExecutor,T>;

    __AGENCY_ANNOTATION
    bulk_synchronous_executor_adaptor() = default;

    __AGENCY_ANNOTATION
    bulk_synchronous_executor_adaptor(const bulk_synchronous_executor_adaptor&) = default;

    // wraps an existing executor
    __AGENCY_ANNOTATION
    bulk_synchronous_executor_adaptor(const BulkExecutor& other)
      : adapted_executor_(other)
    {}

    // implements bulk_sync_execute by lowering onto the bulk_sync_execute customization point
    template<class Function, class ResultFactory, class... SharedFactories>
    __AGENCY_ANNOTATION
    result_of_t<ResultFactory()>
      bulk_sync_execute(Function f, shape_type shape, ResultFactory result_factory, SharedFactories... shared_factories)
    {
      return agency::bulk_sync_execute(adapted_executor_, f, shape, result_factory, shared_factories...);
    }
};
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/unit.hpp>
#include <agency/memory/detail/unique_ptr.hpp>
#include <agency/memory/allocator/detail/allocator_traits/is_allocator.hpp>
#include <agency/detail/tuple/tuple_utility.hpp>
#include <type_traits>
namespace agency
{
namespace detail
{
// a pointer-like type for the empty unit type: dereferencing yields the
// unit_ptr itself, since unit carries no state
struct unit_ptr : unit
{
  using element_type = unit;

  __AGENCY_ANNOTATION
  unit& operator*()
  {
    return *this;
  }

  __AGENCY_ANNOTATION
  const unit& operator*() const
  {
    return *this;
  }
};
// trait: true when an asynchronous_state holding T does not need heap storage
// (empty types, void, and empty tuples can be materialized on the fly)
template<class T>
struct state_requires_storage
  : std::integral_constant<
      bool,
      std::is_empty<T>::value || std::is_void<T>::value || agency::detail::is_empty_tuple<T>::value
    >
{};
// tag types used to select between asynchronous_state's ready / not-ready constructors
struct construct_ready_t {};
struct construct_not_ready_t {};

constexpr static construct_ready_t construct_ready{};
constexpr static construct_not_ready_t construct_not_ready{};
// XXX asynchronous_state should be templated on Allocator instead of Deleter
// XXX should try to collapse the implementation of asynchronous_state as much as possible between the two specializations
// XXX the default value of Deleter should be some polymorphic deleter type
// XXX the default state of the polymorphic deleter type should be an instance of default_delete<T>
// asynchronous_state owns the (heap-allocated) storage backing a future's
// eventual value. This primary template is used when T actually requires
// storage; a zero-storage specialization handles empty/void types.
// Fix: get() now returns the moved-out local by name (`return result;`)
// instead of `return std::move(result);` -- returning a named local already
// moves, and wrapping it in std::move inhibits copy elision (NRVO).
// XXX asynchronous_state should be templated on Allocator instead of Deleter
// XXX should try to collapse the implementation of asynchronous_state as much as possible between the two specializations
template<class T,
         class Allocator = std::allocator<T>,
         bool requires_storage = state_requires_storage<T>::value>
class asynchronous_state
{
  public:
    static_assert(is_allocator<Allocator>::value, "Allocator is not an allocator.");

    using value_type = T;
    using storage_type = unique_ptr<T,allocation_deleter<Allocator>>;
    using pointer = typename storage_type::pointer;

    // constructs an invalid state
    __AGENCY_ANNOTATION
    asynchronous_state() = default;

    // constructs an immediately ready state holding T(ready_args...)
    __agency_exec_check_disable__
    template<class... Args,
             class = typename std::enable_if<
               std::is_constructible<T,Args...>::value
             >::type
            >
    __AGENCY_ANNOTATION
    asynchronous_state(construct_ready_t, const Allocator& allocator, Args&&... ready_args)
      : storage_(allocate_unique<T>(allocator, std::forward<Args>(ready_args)...))
    {}

    // constructs a not ready state from a pointer to the result and an allocator which can delete & deallocate the pointer
    template<class OtherAllocator,
             __AGENCY_REQUIRES(
               std::is_constructible<
                 Allocator, OtherAllocator
               >::value
             )>
    __AGENCY_ANNOTATION
    asynchronous_state(construct_not_ready_t, pointer ptr, const OtherAllocator& allocator)
      : storage_(ptr, allocation_deleter<Allocator>(allocator))
    {}

    // constructs a not ready state
    // XXX we should avoid creating an object here
    //     instead, we should just create it uninitialized
    // XXX the destructor should check whether the state requires destruction
    __AGENCY_ANNOTATION
    asynchronous_state(construct_not_ready_t, const Allocator& allocator)
      : asynchronous_state(construct_ready, allocator, T{})
    {}

    // move constructor: transfers ownership of the storage
    __AGENCY_ANNOTATION
    asynchronous_state(asynchronous_state&& other) = default;

    // converting move constructor from a compatible asynchronous_state
    template<class OtherT,
             class OtherAllocator,
             class = typename std::enable_if<
               std::is_constructible<storage_type, typename asynchronous_state<OtherT,OtherAllocator>::storage_type&&>::value
             >::type
            >
    __AGENCY_ANNOTATION
    asynchronous_state(asynchronous_state<OtherT,OtherAllocator>&& other)
      : storage_(std::move(other.storage_))
    {}

    __AGENCY_ANNOTATION
    asynchronous_state& operator=(asynchronous_state&&) = default;

    // returns a pointer to the stored value (null when invalid)
    __AGENCY_ANNOTATION
    pointer data() const
    {
      return storage_.get();
    }

    // moves the stored value out and invalidates this state
    __AGENCY_ANNOTATION
    T get()
    {
      T result = std::move(*storage_);
      storage_.reset();

      // return by name: the local is implicitly moved and NRVO may elide the move entirely
      return result;
    }

    // returns whether this state owns storage
    __AGENCY_ANNOTATION
    bool valid() const
    {
      return storage_;
    }

    __AGENCY_ANNOTATION
    void swap(asynchronous_state& other)
    {
      storage_.swap(other.storage_);
    }

    __AGENCY_ANNOTATION
    storage_type& storage()
    {
      return storage_;
    }

  private:
    // grant access to all other specializations for the converting constructor above
    template<class, class, bool>
    friend class asynchronous_state;

    storage_type storage_;
};
// when a type is empty, we can create instances on the fly upon dereference
// when a type is empty, we can create instances on the fly upon dereference:
// empty_type_ptr is a stateless pointer-like object which reinterprets itself
// as a T when dereferenced
template<class T>
struct empty_type_ptr
{
  using element_type = T;

  __AGENCY_ANNOTATION
  empty_type_ptr() = default;

  __AGENCY_ANNOTATION
  empty_type_ptr(const empty_type_ptr&) = default;

  // constructs from a value of T (or anything convertible to it)
  template<class U,
           __AGENCY_REQUIRES(
             std::is_constructible<T,U&&>::value
           )>
  __AGENCY_ANNOTATION
  empty_type_ptr(U&& value)
  {
    // this evaluates T's copy constructor's effects, but nothing is stored
    // because both T and empty_type_ptr are empty types
    new (this) T(std::forward<U>(value));
  }

  __AGENCY_ANNOTATION
  T& operator*()
  {
    return *reinterpret_cast<T*>(this);
  }

  __AGENCY_ANNOTATION
  const T& operator*() const
  {
    return *reinterpret_cast<const T*>(this);
  }

  // even though T is empty and there is nothing to swap,
  // swap(T,T) may have effects, so call it
  __AGENCY_ANNOTATION
  void swap(empty_type_ptr& other)
  {
    detail::adl_swap(**this, *other);
  }
};
// specialization for void: behaves like a pointer to the unit type
template<>
struct empty_type_ptr<void> : unit_ptr
{
  __AGENCY_ANNOTATION
  empty_type_ptr() = default;

  __AGENCY_ANNOTATION
  empty_type_ptr(const empty_type_ptr&) = default;

  // allow copy construction from empty_type_ptr<T>
  // this is analogous to casting a T to void
  template<class T>
  __AGENCY_ANNOTATION
  empty_type_ptr(const empty_type_ptr<T>&)
    : empty_type_ptr()
  {}

  __AGENCY_ANNOTATION
  void swap(empty_type_ptr&) const
  {
    // swapping a void has no effect
  }
};
// zero storage optimization
// zero storage optimization: when T requires no storage (empty, void, or an
// empty tuple), the state is just a validity flag and values are materialized
// on demand through empty_type_ptr
template<class T, class Allocator>
class asynchronous_state<T,Allocator,true> : private empty_type_ptr<T>
{
  private:
    using super_t = empty_type_ptr<T>;

  public:
    static_assert(is_allocator<Allocator>::value, "Allocator is not an allocator.");

    using value_type = T;
    using pointer = empty_type_ptr<T>;
    // there is no storage, so no storage_type
    using storage_type = void;

    // constructs an invalid state
    __AGENCY_ANNOTATION
    asynchronous_state() : super_t(), valid_(false) {}

    // constructs an immediately ready state
    // the allocator is ignored because this state requires no storage
    template<class OtherAllocator,
             class OtherT,
             __AGENCY_REQUIRES(
               std::is_constructible<Allocator,OtherAllocator>::value
             ),
             __AGENCY_REQUIRES(
               std::is_constructible<T,OtherT&&>::value
             )>
    __AGENCY_ANNOTATION
    asynchronous_state(construct_ready_t, const OtherAllocator&, OtherT&& value) : super_t(std::forward<OtherT>(value)), valid_(true) {}

    // constructs an immediately ready state
    // the allocator is ignored because this state requires no storage
    template<class OtherAllocator,
             __AGENCY_REQUIRES(std::is_constructible<Allocator,OtherAllocator>::value)>
    __AGENCY_ANNOTATION
    asynchronous_state(construct_ready_t, const OtherAllocator&) : super_t(), valid_(true) {}

    // constructs a not ready state
    // the allocator is ignored because this state requires no storage
    template<class OtherAllocator,
             __AGENCY_REQUIRES(std::is_constructible<Allocator,OtherAllocator>::value)>
    __AGENCY_ANNOTATION
    asynchronous_state(construct_not_ready_t, const OtherAllocator&) : super_t(), valid_(true) {}

    // move constructor: transfers validity from other
    __AGENCY_ANNOTATION
    asynchronous_state(asynchronous_state&& other) : super_t(std::move(other)), valid_(other.valid_)
    {
      other.valid_ = false;
    }

    // 1. allow moves to void states (this simply discards the state)
    // 2. allow moves to empty types if the type can be constructed from an empty argument list
    // 3. allow upcasts to a base T from a derived U
    template<class OtherT,
             class OtherAllocator,
             __AGENCY_REQUIRES(
               std::is_constructible<empty_type_ptr<T>,empty_type_ptr<OtherT>&&>::value
             )>
    __AGENCY_ANNOTATION
    asynchronous_state(asynchronous_state<OtherT,OtherAllocator>&& other)
      : super_t(std::move(other)), valid_(other.valid())
    {
      if(valid())
      {
        // invalidate the old state by calling .get() if it was valid when we received it
        other.get();
      }
    }

    // move assignment: transfers validity from other
    __AGENCY_ANNOTATION
    asynchronous_state& operator=(asynchronous_state&& other)
    {
      valid_ = other.valid_;
      other.valid_ = false;
      return *this;
    }

    // returns a pointer-like object which materializes a T when dereferenced
    __AGENCY_ANNOTATION
    empty_type_ptr<T> data() const
    {
      return *this;
    }

    // returns the value (or nothing for void T) and invalidates this state
    __AGENCY_ANNOTATION
    T get()
    {
      valid_ = false;

      return get_impl(std::is_void<T>());
    }

    __AGENCY_ANNOTATION
    bool valid() const
    {
      return valid_;
    }

    // swaps both the (empty) base and the validity flags
    __AGENCY_ANNOTATION
    void swap(asynchronous_state& other)
    {
      super_t::swap(other);

      bool other_valid_old = other.valid_;
      other.valid_ = valid_;
      valid_ = other_valid_old;
    }

  private:
    // non-void case: dereference the empty_type_ptr base
    __AGENCY_ANNOTATION
    T get_impl(std::false_type)
    {
      return super_t::operator*();
    }

    // void case: nothing to return
    __AGENCY_ANNOTATION
    T get_impl(std::true_type)
    {
      return;
    }

    template<class, class, bool>
    friend class asynchronous_state;

    // whether this state logically refers to a (conceptual) value
    bool valid_;
};
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/detail/shape.hpp>
#include <agency/execution/executor/executor_traits/executor_shape.hpp>
#include <utility>
#include <type_traits>
namespace agency
{
namespace detail
{
// detection idiom: tests whether Executor has a member function
// .max_shape_dimensions() returning exactly Shape
template<class Executor, class Shape>
struct has_max_shape_dimensions_impl
{
  template<
    class Executor1,
    class ReturnType = decltype(std::declval<Executor1>().max_shape_dimensions()),
    __AGENCY_REQUIRES(std::is_same<ReturnType,Shape>::value)
  >
  static std::true_type test(int);

  template<class>
  static std::false_type test(...);

  using type = decltype(test<Executor>(0));
};

// convenience alias: std::true_type or std::false_type
template<class Executor, class Shape>
using has_max_shape_dimensions = typename has_max_shape_dimensions_impl<Executor,Shape>::type;
} // end detail
// this overload handles the case when an Executor has .max_shape_dimensions()
// and simply forwards to the executor's own member function
__agency_exec_check_disable__
template<class E,
         __AGENCY_REQUIRES(detail::Executor<E>()),
         __AGENCY_REQUIRES(detail::has_max_shape_dimensions<E,executor_shape_t<E>>::value)
        >
__AGENCY_ANNOTATION
executor_shape_t<E> max_shape_dimensions(const E& exec)
{
  return exec.max_shape_dimensions();
}
// this overload handles the case when an Executor does not have .max_shape_dimensions()
// and falls back to the default supplied by detail::max_shape_dimensions for the shape type
template<class E,
         __AGENCY_REQUIRES(detail::Executor<E>()),
         __AGENCY_REQUIRES(!detail::has_max_shape_dimensions<E,executor_shape_t<E>>::value)
        >
__AGENCY_ANNOTATION
executor_shape_t<E> max_shape_dimensions(const E&)
{
  return detail::max_shape_dimensions<executor_shape_t<E>>();
}
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/has_member.hpp>
#include <agency/detail/type_traits.hpp>
#include <agency/execution/executor/executor_traits/executor_future.hpp>
#include <agency/execution/executor/executor_traits/executor_execution_depth.hpp>
#include <agency/execution/execution_agent.hpp>
#include <agency/execution/execution_policy/execution_policy_traits.hpp>
#include <utility>
#include <type_traits>
namespace agency
{
namespace detail
{
// yields T::executor_type
template<class T>
using executor_member_t = typename T::executor_type;

// yields the decayed return type of T::executor()
template<class T>
using executor_member_function_t = decay_t<decltype(std::declval<T*>()->executor())>;

template<class ExecutionPolicy>
struct execution_policy_executor
{
  // to detect an execution policy's executor_type,
  // first look for a member type named executor_type,
  // if it does not exist, look for a member function named .executor()
  using type = detected_or_t<
    executor_member_function_t<ExecutionPolicy>,
    executor_member_t, ExecutionPolicy
  >;
};

template<class ExecutionPolicy>
using execution_policy_executor_t = typename execution_policy_executor<ExecutionPolicy>::type;

// XXX nvcc can't correctly compile this implementation of has_executor_member_function in all cases
//// detects whether T::executor() exists
//template<class T>
//using has_executor_member_function = is_detected<executor_member_function_t, T>;

// manual SFINAE detection of a member function .executor() (nvcc workaround)
template<class T>
struct has_executor_member_function_impl
{
  template<
    class T2,
    class = decltype(std::declval<T2*>()->executor())
  >
  static std::true_type test(int);

  template<class>
  static std::false_type test(...);

  using type = decltype(test<T>(0));
};

template<class T>
using has_executor_member_function = typename has_executor_member_function_impl<T>::type;

// XXX nvcc can't correctly compile this implementation in all cases
// detects whether T::executor_type exists
//template<class T>
//using has_executor_member_type = is_detected<executor_member_t, T>;

__DEFINE_HAS_MEMBER_TYPE(has_executor_member_type, executor_type);

// detects whether T::executor() or T::executor_type exist
template<class T>
using has_executor = disjunction<
  has_executor_member_function<T>,
  has_executor_member_type<T>
>;
// XXX nvcc can't correctly compile this implementation in all cases
//template<class T>
//using execution_agent_type_member_t = typename T::execution_agent_type;
//
//template<class T>
//struct execution_policy_agent
//{
//  using type = detected_t<execution_agent_type_member_t, T>;
//};

__DEFINE_HAS_MEMBER_TYPE(has_execution_agent_type, execution_agent_type);

// primary template: no nested ::type when ExecutionPolicy lacks execution_agent_type,
// so execution_policy_agent_t is SFINAE-friendly
template<class ExecutionPolicy, class Enable = void>
struct execution_policy_agent {};

// specialization: expose ExecutionPolicy::execution_agent_type when present
template<class ExecutionPolicy>
struct execution_policy_agent<ExecutionPolicy,typename std::enable_if<has_execution_agent_type<ExecutionPolicy>::value>::type>
{
  using type = typename ExecutionPolicy::execution_agent_type;
};

template<class T>
using execution_policy_agent_t = typename execution_policy_agent<T>::type;

// XXX nvcc can't correctly compile this implementation in all cases
//template<class T>
//using has_execution_agent_type = is_detected<execution_policy_agent_t, T>;

// returns T's ::execution_agent_type or Default if T has no ::execution_agent_type
template<class T, class Default>
using execution_policy_agent_or_t = lazy_conditional_t<
  has_execution_agent_type<T>::value,
  execution_policy_agent<T>,
  identity<Default>
>;
// XXX nvcc can't correctly compile this implementation of execution_policy_param in all cases
// yields T::param_type
template<class T>
using param_member_t = typename T::param_type;

// yields the decayed return type of T::param()
template<class T>
using param_member_function_t = decay_t<decltype(std::declval<T*>()->param())>;

template<class ExecutionPolicy>
struct execution_policy_param
{
  // to detect an execution policy's param_type,
  // first look for a member type named param_type,
  // if it does not exist, look for a member function named .param()
  using type = detected_or_t<
    param_member_function_t<ExecutionPolicy>,
    param_member_t, ExecutionPolicy
  >;
};

template<class ExecutionPolicy>
using execution_policy_param_t = typename execution_policy_param<ExecutionPolicy>::type;

// XXX nvcc can't correctly compile this implementation of has_param_member_type in all cases
//// detects whether T::param() exists
//template<class T>
//using has_param_member_function = is_detected<param_member_function_t, T>;

// manual SFINAE detection of a member function .param() (nvcc workaround)
template<class T>
struct has_param_member_function_impl
{
  template<
    class T2,
    class = decltype(std::declval<T2*>()->param())
  >
  static std::true_type test(int);

  template<class>
  static std::false_type test(...);

  using type = decltype(test<T>(0));
};

template<class T>
using has_param_member_function = typename has_param_member_function_impl<T>::type;

// XXX nvcc can't correctly compile this implementation of has_param_member_type in all cases
// detects whether T::param_type exists
//template<class T>
//using has_param_member_type = is_detected<param_member_t, T>;

__DEFINE_HAS_MEMBER_TYPE(has_param_member_type, param_type);

// detects whether T::param() or T::param_type exist
template<class T>
using has_param = disjunction<
  has_param_member_function<T>,
  has_param_member_type<T>
>;
// the execution depth of a policy is the execution depth of its executor
template<class ExecutionPolicy>
struct execution_policy_execution_depth
  : executor_execution_depth<
      execution_policy_executor_t<ExecutionPolicy>
    >
{};

// the execution category of a policy comes from its execution agent's traits
template<class ExecutionPolicy>
struct execution_policy_execution_category
{
  using type = typename execution_agent_traits<execution_policy_agent_t<ExecutionPolicy>>::execution_category;
};

template<class ExecutionPolicy>
using execution_policy_execution_category_t = typename execution_policy_execution_category<ExecutionPolicy>::type;

// convenience predicates classifying a policy by its execution category

template<class ExecutionPolicy>
using policy_is_sequenced = std::is_same<sequenced_execution_tag, execution_policy_execution_category_t<ExecutionPolicy>>;

template<class ExecutionPolicy>
using policy_is_parallel = std::is_same<parallel_execution_tag, execution_policy_execution_category_t<ExecutionPolicy>>;

template<class ExecutionPolicy>
using policy_is_concurrent = std::is_same<concurrent_execution_tag, execution_policy_execution_category_t<ExecutionPolicy>>;

// true for two-level policies with parallel outer and concurrent inner agents
template<class ExecutionPolicy>
using policy_is_scoped_parallel_concurrent = std::is_same<
  scoped_execution_tag<
    parallel_execution_tag,
    concurrent_execution_tag
  >,
  execution_policy_execution_category_t<ExecutionPolicy>
>;

// true for two-level policies with concurrent outer and concurrent inner agents
template<class ExecutionPolicy>
using policy_is_scoped_concurrent_concurrent = std::is_same<
  scoped_execution_tag<
    concurrent_execution_tag,
    concurrent_execution_tag
  >,
  execution_policy_execution_category_t<ExecutionPolicy>
>;
} // end detail
// a type models ExecutionPolicy when it exposes an execution agent type,
// an executor, and a parameterization
template<class T>
struct is_execution_policy : detail::conjunction<
  detail::has_execution_agent_type<T>,
  detail::has_executor<T>,
  detail::has_param<T>
> {};
namespace detail
{
// primary template: no nested ::type when ExecutionPolicy has no executor,
// making execution_policy_future SFINAE-friendly
template<class ExecutionPolicy, class T, class Enable = void>
struct execution_policy_future_impl {};

// specialization: the policy's future type is its executor's future type for T
template<class ExecutionPolicy, class T>
struct execution_policy_future_impl<ExecutionPolicy,T,typename std::enable_if<has_executor<ExecutionPolicy>::value>::type>
{
  using type = executor_future_t<execution_policy_executor_t<ExecutionPolicy>, T>;
};
} // end detail
// the type of future produced by asynchronous operations launched
// through ExecutionPolicy when the task returns T
template<class ExecutionPolicy, class T>
struct execution_policy_future
{
  using type = typename detail::execution_policy_future_impl<
    detail::decay_t<ExecutionPolicy>,
    T
  >::type;
};

template<class ExecutionPolicy, class T>
using execution_policy_future_t = typename execution_policy_future<ExecutionPolicy,T>::type;
} // end agency
<file_sep>#pragma once
#include <agency/execution/executor/customization_points/async_execute.hpp>
#include <agency/execution/executor/customization_points/bulk_async_execute.hpp>
#include <agency/execution/executor/customization_points/bulk_sync_execute.hpp>
#include <agency/execution/executor/customization_points/bulk_then_execute.hpp>
#include <agency/execution/executor/customization_points/future_cast.hpp>
#include <agency/execution/executor/customization_points/make_ready_future.hpp>
#include <agency/execution/executor/customization_points/max_shape_dimensions.hpp>
#include <agency/execution/executor/customization_points/sync_execute.hpp>
#include <agency/execution/executor/customization_points/then_execute.hpp>
#include <agency/execution/executor/customization_points/unit_shape.hpp>
<file_sep>#include <agency/execution/executor/executor_traits.hpp>
#include <type_traits>
#include <iostream>
#include "../test_executors.hpp"
// Compile-time tests for the executor_shape_t trait: it must be undetectable
// for non-executors, default to size_t, and honor an explicit shape_type.
int main()
{
  static_assert(!agency::detail::is_detected<agency::executor_shape_t, not_an_executor>::value, "executor_shape_t<not_an_executor> should not be detected");
  static_assert(agency::detail::is_detected_exact<size_t, agency::executor_shape_t, bulk_executor_without_shape_type>::value, "bulk_executor_without_shape_type should have size_t shape_type");
  static_assert(agency::detail::is_detected_exact<bulk_executor_with_shape_type::shape_type, agency::executor_shape_t, bulk_executor_with_shape_type>::value, "bulk_executor_with_shape_type should have bulk_executor_with_shape_type::shape_type shape_type");
  static_assert(agency::detail::is_detected_exact<size_t, agency::executor_shape_t, bulk_continuation_executor>::value, "bulk_continuation_executor should have size_t shape_type");

  std::cout << "OK" << std::endl;

  return 0;
}
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/execution/execution_agent/execution_agent_traits.hpp>
#include <agency/execution/execution_categories.hpp>
#include <agency/detail/tuple/tuple_utility.hpp>
#include <agency/detail/unwrap_tuple_if_not_scoped.hpp>
#include <agency/detail/index_tuple.hpp>
#include <agency/detail/index_lexicographical_rank.hpp>
#include <utility>
#include <type_traits>
namespace agency
{
namespace detail
{
// primary template: contributes nothing when the outer agent has no shared_param_type
template<class OuterExecutionAgent, class Enable = void>
struct execution_group_base {};

// if execution_group's OuterExecutionAgent has a shared_param_type,
// then execution_group needs to have a shared_param_type which can be constructed from execution_group::param_type
template<class OuterExecutionAgent>
struct execution_group_base<OuterExecutionAgent,
                            typename std::enable_if<
                              detail::has_shared_param_type<OuterExecutionAgent>::value
                            >::type>
{
  struct shared_param_type : public OuterExecutionAgent::shared_param_type
  {
    // forwards the outer portion of the group's param plus any extra
    // arguments to the outer agent's shared_param_type constructor
    template<class ParamType, class... Args>
    __AGENCY_ANNOTATION
    shared_param_type(const ParamType& param, Args&&... args)
      : OuterExecutionAgent::shared_param_type(param.outer(), std::forward<Args>(args)...)
    {}
  };
};
// execution_group composes an outer and an inner execution agent into a single
// two-level ("scoped") agent: its index, domain, and category are the
// concatenation/composition of the outer and inner agents' counterparts.
template<class OuterExecutionAgent, class InnerExecutionAgent>
class execution_group : public execution_group_base<OuterExecutionAgent>
{
  private:
    using outer_traits = execution_agent_traits<OuterExecutionAgent>;
    using inner_traits = execution_agent_traits<InnerExecutionAgent>;

    using outer_execution_category = typename outer_traits::execution_category;
    using inner_execution_category = typename inner_traits::execution_category;

    using outer_index_type = typename outer_traits::index_type;
    using inner_index_type = typename inner_traits::index_type;

  public:
    // the group's index type is the tuple-concatenation of the outer and
    // inner index types (scoped indices are flattened by make_tuple_if_not_scoped)
    using index_type = decltype(
      __tu::tuple_cat_apply(
        agency::detail::index_tuple_maker{},
        agency::detail::make_tuple_if_not_scoped<outer_execution_category>(std::declval<outer_index_type>()),
        agency::detail::make_tuple_if_not_scoped<inner_execution_category>(std::declval<inner_index_type>())
      )
    );

  private:
    // concatenates an outer index with an inner index
    // returns an index_tuple with arithmetic ops (not a std::tuple)
    // XXX move this into index_tuple.hpp?
    __AGENCY_ANNOTATION
    static index_type index_cat(const outer_index_type& outer_idx, const inner_index_type& inner_idx)
    {
      return __tu::tuple_cat_apply(
        agency::detail::index_tuple_maker{},
        agency::detail::make_tuple_if_not_scoped<outer_execution_category>(outer_idx),
        agency::detail::make_tuple_if_not_scoped<inner_execution_category>(inner_idx)
      );
    }

  public:
    using execution_category = scoped_execution_tag<
      outer_execution_category,
      inner_execution_category
    >;

    using outer_execution_agent_type = OuterExecutionAgent;
    using inner_execution_agent_type = InnerExecutionAgent;

    // the group's parameterization simply pairs the outer and inner params
    class param_type
    {
      private:
        typename outer_traits::param_type outer_;
        typename inner_traits::param_type inner_;

      public:
        __AGENCY_ANNOTATION
        param_type() = default;

        __AGENCY_ANNOTATION
        param_type(const param_type&) = default;

        __AGENCY_ANNOTATION
        param_type(const typename outer_traits::param_type& o, const typename inner_traits::param_type& i) : outer_(o), inner_(i) {}

        __AGENCY_ANNOTATION
        const typename outer_traits::param_type& outer() const
        {
          return outer_;
        }

        __AGENCY_ANNOTATION
        const typename inner_traits::param_type& inner() const
        {
          return inner_;
        }
    };

    // access to the constituent agents
    __AGENCY_ANNOTATION
    outer_execution_agent_type& outer()
    {
      return outer_agent_;
    }

    __AGENCY_ANNOTATION
    const outer_execution_agent_type& outer() const
    {
      return outer_agent_;
    }

    __AGENCY_ANNOTATION
    inner_execution_agent_type& inner()
    {
      return inner_agent_;
    }

    __AGENCY_ANNOTATION
    const inner_execution_agent_type& inner() const
    {
      return inner_agent_;
    }

    // this agent's index is the concatenation of the outer and inner indices
    __AGENCY_ANNOTATION
    index_type index() const
    {
      return index_cat(this->outer().index(), this->inner().index());
    }

    using domain_type = lattice<index_type>;

    // the domain spanned by the group: outer and inner domains concatenated
    __AGENCY_ANNOTATION
    domain_type domain() const
    {
      auto outer_domain = outer().domain();
      auto inner_domain = this->inner().domain();

      auto min = index_cat(outer_domain.min(), inner_domain.min());
      auto max = index_cat(outer_domain.max(), inner_domain.max());

      return domain_type{min,max};
    }

    // XXX can probably move this to execution_agent_traits
    // static variant computing the domain directly from a parameterization
    __AGENCY_ANNOTATION
    static domain_type domain(const param_type& param)
    {
      auto outer_domain = outer_traits::domain(param.outer());
      auto inner_domain = inner_traits::domain(param.inner());

      auto min = index_cat(outer_domain.min(), inner_domain.min());
      auto max = index_cat(outer_domain.max(), inner_domain.max());

      return domain_type{min,max};
    }

    __AGENCY_ANNOTATION
    auto group_shape() const
      -> decltype(this->domain().shape())
    {
      return domain().shape();
    }

    // total number of agents in the group
    __AGENCY_ANNOTATION
    auto group_size() const
      -> decltype(this->outer().group_size() * inner().group_size())
    {
      return outer().group_size() * inner().group_size();
    }

    // lexicographic rank of this agent's index within the group's shape
    __AGENCY_ANNOTATION
    auto rank() const
      -> decltype(this->group_size())
    {
      return agency::detail::index_lexicographical_rank(index(), group_shape());
    }

    // true for exactly one agent of the group (the one elected at both levels)
    __AGENCY_ANNOTATION
    bool elect() const
    {
      return outer().elect() && inner().elect();
    }

  protected:
    __agency_exec_check_disable__
    __AGENCY_ANNOTATION
    execution_group(const index_type& index, const param_type& param)
      : outer_agent_(detail::make_agent<outer_execution_agent_type>(outer_index(index), param.outer())),
        inner_agent_(detail::make_agent<inner_execution_agent_type>(inner_index(index), param.inner()))
    {}

    // XXX ensure all the shared params are the right type
    // construction with shared parameters: the first shared param goes to the
    // outer agent, the rest go to the inner agent
    __agency_exec_check_disable__
    template<class SharedParam1, class... SharedParams>
    __AGENCY_ANNOTATION
    execution_group(const index_type& index, const param_type& param, SharedParam1& shared_param1, SharedParams&... shared_params)
      : outer_agent_(agency::detail::make_agent<outer_execution_agent_type>(outer_index(index), param.outer(), shared_param1)),
        inner_agent_(agency::detail::make_agent<inner_execution_agent_type>(inner_index(index), param.inner(), shared_params...))
    {}

    // friend execution_agent_traits so it has access to the constructors
    template<class> friend struct agency::execution_agent_traits;

    // the outer index is the head element of the concatenated index
    __AGENCY_ANNOTATION
    static outer_index_type outer_index(const index_type& index)
    {
      return __tu::tuple_head(index);
    }

    // the inner index is the tail, unwrapped when the inner agent is not scoped
    __AGENCY_ANNOTATION
    static inner_index_type inner_index(const index_type& index)
    {
      return detail::unwrap_tuple_if_not_scoped<inner_execution_category>(detail::forward_tail(index));
    }

    outer_execution_agent_type outer_agent_;
    inner_execution_agent_type inner_agent_;
};
} // end detail
} // end agency
<file_sep>/// \file
/// \brief Contains definition of basic_execution_policy.
///
#pragma once
#include <agency/detail/config.hpp>
#include <agency/tuple.hpp>
#include <agency/execution/execution_agent.hpp>
#include <agency/execution/execution_policy/execution_policy_traits.hpp>
#include <agency/execution/executor/executor_traits.hpp>
#include <agency/execution/executor/scoped_executor.hpp>
#include <utility>
#include <tuple>
#include <type_traits>
#include <initializer_list>
namespace agency
{
// declare basic_execution_policy for replace_executor()'s signature below
template<class ExecutionAgent,
class BulkExecutor,
class DerivedExecutionPolicy = void>
class basic_execution_policy;
// declare replace_executor() so basic_execution_policy.on() can use it below
template<class ExecutionPolicy, class Executor>
__AGENCY_ANNOTATION
typename std::enable_if<
is_executor<Executor>::value,
basic_execution_policy<typename ExecutionPolicy::execution_agent_type, Executor>
>::type
replace_executor(const ExecutionPolicy& policy, const Executor& exec);
namespace detail
{
// Metafunction yielding the final type of a parameter pack,
// or void for an empty pack. Implemented by recursive inheritance:
// peel off the head until a single type remains.
template<class... Types>
struct last_type_impl;

// empty pack: no last element, report void
template<>
struct last_type_impl<>
{
  using type = void;
};

// single element: it is the last one
template<class T>
struct last_type_impl<T>
{
  using type = T;
};

// two or more elements: recurse on the tail
template<class T, class... Rest>
struct last_type_impl<T, Rest...> : last_type_impl<Rest...> {};

template<class... Types>
using last_type = typename last_type_impl<Types...>::type;
// true when the trailing argument is itself an execution policy (the inner
// policy of a scoped call) and the leading arguments can construct ParamType
template<class ParamType, class... Args>
struct is_scoped_call
  : std::integral_constant<
      bool,
      is_execution_policy<last_type<Args...>>::value &&
      is_constructible_from_type_list<
        ParamType,
        type_list_drop_last<
          type_list<Args...>
        >
      >::value
    >
{};

// true when all the arguments together can construct ParamType (a flat,
// single-level reparameterization)
template<class ParamType, class... Args>
struct is_flat_call
  : std::integral_constant<
      bool,
      is_constructible_from_type_list<ParamType, type_list<Args...>>::value
    >
{};

// declare scoped_execution_policy for basic_execution_policy's use below
template<class ExecutionPolicy1, class ExecutionPolicy2>
class scoped_execution_policy;
} // end detail
/// \brief The basic type from which all of Agency's execution policies derive their common functionality.
/// \ingroup execution_policies
///
///
/// basic_execution_policy defines the essential functionality which all of Agency's execution policies share in common.
/// Because all of Agency's execution policy types publicly inherit from basic_execution_policy, the documentation for
/// their common, public functionality is collected here.
///
/// basic_execution_policy may also be used to define custom execution policy types by instantiating basic_execution_policy
/// with an execution agent type and an executor type. Either of these types may be user-defined.
///
/// \tparam ExecutionAgent The type of execution agent created by the basic_execution_policy.
/// \tparam Executor The type of executor associated with the basic_execution_policy.
/// \tparam DerivedExecutionPolicy The name of the execution policy deriving from this basic_execution_policy.
/// `void` indicates that no execution policy will be derived from this basic_execution_policy.
template<class ExecutionAgent,
         class Executor,
         class DerivedExecutionPolicy>
class basic_execution_policy
{
  public:
    // validate that it makes sense to execute the agent's requirements using the executor's guarantees
    static_assert(detail::is_weaker_than<
                    typename execution_agent_traits<ExecutionAgent>::execution_category,
                    executor_execution_category_t<Executor>
                  >::value,
                  "basic_execution_policy: ExecutionAgent's forward progress requirements cannot be satisfied by Executor's guarantees."
                 );

    /// \brief The type of execution agent associated with this basic_execution_policy.
    using execution_agent_type = ExecutionAgent;

    /// \brief The type of executor associated with this basic_execution_policy.
    using executor_type = Executor;

  private:
    // when no derived policy is named, operator() returns basic_execution_policy itself
    using derived_type = typename std::conditional<
      std::is_same<DerivedExecutionPolicy,void>::value,
      basic_execution_policy,
      DerivedExecutionPolicy
    >::type;

  public:
    /// \brief The type of this execution policy's parameterization.
    using param_type = typename execution_agent_traits<execution_agent_type>::param_type;

    /// \brief The default constructor default constructs this execution policy's associated executor and parameterization.
    __agency_exec_check_disable__
    __AGENCY_ANNOTATION
    basic_execution_policy() = default;

    /// \brief This constructor constructs a new basic_execution_policy given a parameterization and executor.
    /// \param param The parameterization of this basic_execution_policy.
    /// \param executor The executor to associate with this basic_execution_policy.
    __agency_exec_check_disable__
    __AGENCY_ANNOTATION
    basic_execution_policy(const param_type& param, const executor_type& executor = executor_type{})
      : param_(param),
        executor_(executor)
    {}

    /// \brief Returns this execution policy's parameterization.
    __AGENCY_ANNOTATION
    const param_type& param() const
    {
      return param_;
    }

    /// \brief Returns this execution policy's associated executor.
    __AGENCY_ANNOTATION
    executor_type& executor() const
    {
      return executor_;
    }

    /// \brief Replaces this execution policy's executor with another.
    ///
    ///
    /// on() returns a new execution policy identical to `*this` but
    /// whose associated executor has been replaced by another executor.
    ///
    /// For example, we can require an otherwise parallel task to execute sequentially
    /// in the current thread by executing the task on a sequenced_executor:
    ///
    /// ~~~~{.cpp}
    /// agency::sequenced_executor seq_exec;
    ///
    /// // require the parallel_agents induced by par to execute sequentially on seq_exec
    /// agency::bulk_invoke(agency::par(10).on(seq_exec), [](agency::parallel_agent& self)
    /// {
    ///   std::cout << self.index() << std::endl;
    /// });
    ///
    /// // the integers [0,10) are printed in sequence
    /// ~~~~
    ///
    /// Note that using on() does not change the type of execution agent object created by the policy;
    /// it only changes the underlying physical execution of these agents. The relative
    /// forward progress characteristics of the execution agents required by the execution policy
    /// and the forward progress guarantees must make sense; the forward progress guarantees made by
    /// the executor may not weaken the requirements of the policy. A program that attempts to do this
    /// is ill-formed and will not compile. In this example's case, because agency::sequenced_executor
    /// makes a stronger guarantee (sequenced execution) than does agency::par (parallel execution),
    /// the program is well-formed.
    ///
    ///
    /// \param exec The other executor to associate with the returned execution policy.
    /// \return An execution policy equivalent to `*this` but whose associated executor is a copy of `exec`.
    ///         The type of the result is an execution policy type `Policy` with the following characteristics:
    ///           * `Policy::execution_agent_type` is `execution_agent_type`,
    ///           * `Policy::param_type` is `param_type`
    ///           * `Policy::executor_type` is `OtherExecutor`.
    /// \note The given executor's forward progress guarantees must not be weaker than this
    ///       execution policy's forward progress requirements.
    /// \note on() is sugar for the expression `replace_executor(*this, exec)`.
    /// \see replace_executor
    __agency_exec_check_disable__
    template<class OtherExecutor>
    __AGENCY_ANNOTATION
    auto on(const OtherExecutor& exec) const ->
      decltype(replace_executor(*this, exec))
    {
      // note the intentional use of ADL to call replace_executor()
      return replace_executor(*this, exec);
    }

    /// \brief Reparameterizes this execution policy.
    ///
    ///
    /// `operator()` returns a new execution policy identical to `*this` but whose
    /// parameterization is constructed from the given arguments.
    ///
    /// \param arg1 The first argument to forward to `param_type`'s constructor.
    /// \param args The rest of the arguments to forward to `param_type`'s constructor.
    /// \return An execution policy equivalent to `*this` but whose parameterization has been constructed from the given arguments.
    ///         The type of the result is:
    ///           * `DerivedExecutionPolicy`, when `DerivedExecutionPolicy` is not `void`
    ///           * `basic_execution_policy<ExecutionAgent,Executor,void>`, otherwise.
    ///
    // this is the flat form of operator()
    // XXX consider introducing .reparamterize() that makes it clearer exactly what is going on
    template<class Arg1, class... Args>
    __AGENCY_ANNOTATION
#ifndef DOXYGEN_SHOULD_SKIP_THIS
    typename std::enable_if<
      detail::is_flat_call<param_type, Arg1, Args...>::value,
      derived_type
    >::type
#else
    see_below
#endif
      operator()(Arg1&& arg1, Args&&... args) const
    {
      return derived_type{param_type{std::forward<Arg1>(arg1), std::forward<Args>(args)...}, executor()};
    }

    /// \brief Composes this policy with an inner policy into a two-level ("scoped") policy.
    /// All arguments but the last reparameterize this (outer) policy; the final
    /// argument is the inner execution policy.
    // XXX maybe .scope() should just take OuterPolicy & InnerPolicy?
    //     instead of a bunch of args?
    // XXX seems like scope() should require at least two arguments
    template<class Arg1, class... Args>
    detail::scoped_execution_policy<
      derived_type,
      detail::decay_t<detail::last_type<Arg1,Args...>>
    >
      scope(Arg1&& arg1, Args&&... args) const
    {
      // wrap the args in a tuple so we can manipulate them easier
      auto arg_tuple = agency::forward_as_tuple(std::forward<Arg1>(arg1), std::forward<Args>(args)...);

      // get the arguments to the outer execution policy
      auto outer_args = detail::tuple_drop_last(arg_tuple);

      // create the outer execution policy
      auto outer = detail::tuple_apply(*this, outer_args);

      // get the inner execution policy
      auto inner = __tu::tuple_last(arg_tuple);

      // return the composition of the two policies
      return detail::scoped_execution_policy<derived_type,decltype(inner)>(outer, inner);
    }

    // this is the scoped form of operator()
    // it is just sugar for .scope()
    // selected only when the trailing argument is itself an execution policy
    template<class Arg1, class... Args>
    typename std::enable_if<
      detail::is_scoped_call<param_type, Arg1, Args...>::value,
      detail::scoped_execution_policy<
        derived_type,
        detail::decay_t<detail::last_type<Arg1,Args...>>
      >
    >::type
      operator()(Arg1&& arg1, Args&&... args) const
    {
      return scope(std::forward<Arg1>(arg1), std::forward<Args>(args)...);
    }

    // initializer_list overload of the flat form of operator(), so that calls
    // like par({0,0}, {10,10}) deduce braced arguments correctly
    template<class Arg1, class... Args>
    derived_type operator()(std::initializer_list<Arg1> arg1, std::initializer_list<Args>... args) const
    {
      return derived_type{param_type{std::move(arg1), std::move(args)...}, executor()};
    }

  protected:
    param_type param_;

    // executor_ needs to be mutable, because:
    // * the global execution policy objects are constexpr
    // * executor's member functions are not const
    mutable executor_type executor_;
};
namespace detail
{
// scoped_execution_policy composes two execution policies into one: the
// resulting policy creates execution_groups of the two policies' agents and
// executes them on a scoped_executor built from the two policies' executors.
template<class ExecutionPolicy1, class ExecutionPolicy2>
class scoped_execution_policy
  : public basic_execution_policy<
      execution_group<
        typename ExecutionPolicy1::execution_agent_type,
        typename ExecutionPolicy2::execution_agent_type
      >,
      scoped_executor<
        typename ExecutionPolicy1::executor_type,
        typename ExecutionPolicy2::executor_type
      >,
      scoped_execution_policy<ExecutionPolicy1,ExecutionPolicy2>
    >
{
  private:
    using super_t = basic_execution_policy<
      execution_group<
        typename ExecutionPolicy1::execution_agent_type,
        typename ExecutionPolicy2::execution_agent_type
      >,
      scoped_executor<
        typename ExecutionPolicy1::executor_type,
        typename ExecutionPolicy2::executor_type
      >,
      scoped_execution_policy<ExecutionPolicy1,ExecutionPolicy2>
    >;

  public:
    using outer_execution_policy_type = ExecutionPolicy1;
    using inner_execution_policy_type = ExecutionPolicy2;
    using typename super_t::execution_agent_type;
    using typename super_t::executor_type;

    // builds the composed param and executor from the two constituent policies
    scoped_execution_policy(const outer_execution_policy_type& outer,
                            const inner_execution_policy_type& inner)
      : super_t(typename execution_agent_type::param_type(outer.param(), inner.param()),
                executor_type(outer.executor(), inner.executor())),
        outer_(outer),
        inner_(inner)
    {}

    // access to the constituent policies
    const outer_execution_policy_type& outer() const
    {
      return outer_;
    }

    const inner_execution_policy_type& inner() const
    {
      return inner_;
    }

  private:
    outer_execution_policy_type outer_;
    inner_execution_policy_type inner_;
};
} // end detail
// Returns a copy of policy whose associated executor is exec; the result's
// agent type and param are unchanged. Participates in overload resolution
// only when Executor models the Executor concept.
__agency_exec_check_disable__
template<class ExecutionPolicy, class Executor>
__AGENCY_ANNOTATION
typename std::enable_if<
  is_executor<Executor>::value,
  basic_execution_policy<typename ExecutionPolicy::execution_agent_type, Executor>
>::type
  replace_executor(const ExecutionPolicy& policy, const Executor& exec)
{
  using policy_category = detail::execution_policy_execution_category_t<ExecutionPolicy>;
  using executor_category = executor_execution_category_t<Executor>;

  // the replacement executor may not weaken the policy's forward progress requirements
  static_assert(detail::is_weaker_than<policy_category, executor_category>::value, "replace_executor(): Execution policy's forward progress requirements cannot be satisfied by executor's guarantees.");

  using result_type = basic_execution_policy<
    typename ExecutionPolicy::execution_agent_type,
    Executor
  >;

  return result_type(policy.param(), exec);
}
} // end agency
<file_sep>#include <agency/agency.hpp>
#include <agency/execution/executor/detail/utility.hpp>
#include <iostream>
#include "../test_executors.hpp"
// Exercises bulk_then_execute_with_collected_result() with a void predecessor:
// each of the 10 invocations reads 13 out of the shared argument, and the
// collected result must equal a vector of ten 13s.
template<class Executor>
void test_with_void_predecessor(Executor exec)
{
  using index_type = agency::executor_index_t<Executor>;

  auto predecessor_future = agency::make_ready_future<void>(exec);

  size_t shape = 10;

  auto f = agency::detail::bulk_then_execute_with_collected_result(exec,
    [](index_type idx, std::vector<int>& shared_arg)
    {
      return shared_arg[idx];
    },
    shape,
    predecessor_future,
    [=]{ return std::vector<int>(shape); },     // results
    [=]{ return std::vector<int>(shape, 13); }  // shared_arg
  );

  auto result = f.get();

  assert(std::vector<int>(shape, 13) == result);
}
// Exercises bulk_then_execute_with_collected_result() with an int predecessor:
// each invocation adds the ready predecessor value 7 to the shared value 13,
// so the collected result must equal a vector of ten 20s.
template<class Executor>
void test_with_non_void_predecessor(Executor exec)
{
  using index_type = agency::executor_index_t<Executor>;

  auto predecessor_future = agency::make_ready_future<int>(exec, 7);

  size_t shape = 10;

  auto f = agency::detail::bulk_then_execute_with_collected_result(exec,
    [](index_type idx, int& predecessor, std::vector<int>& shared_arg)
    {
      return predecessor + shared_arg[idx];
    },
    shape,
    predecessor_future,
    [=]{ return std::vector<int>(shape); },     // results
    [=]{ return std::vector<int>(shape, 13); }  // shared_arg
  );

  auto result = f.get();

  assert(std::vector<int>(shape, 7 + 13) == result);
}
// Runs both predecessor variants across every test executor flavor
// (synchronous, asynchronous, continuation, their "not_a_" adapters,
// and the complete bulk executor).
int main()
{
  test_with_void_predecessor(bulk_synchronous_executor());
  test_with_void_predecessor(bulk_asynchronous_executor());
  test_with_void_predecessor(bulk_continuation_executor());
  test_with_void_predecessor(not_a_bulk_synchronous_executor());
  test_with_void_predecessor(not_a_bulk_asynchronous_executor());
  test_with_void_predecessor(not_a_bulk_continuation_executor());
  test_with_void_predecessor(complete_bulk_executor());

  test_with_non_void_predecessor(bulk_synchronous_executor());
  test_with_non_void_predecessor(bulk_asynchronous_executor());
  test_with_non_void_predecessor(bulk_continuation_executor());
  test_with_non_void_predecessor(not_a_bulk_synchronous_executor());
  test_with_non_void_predecessor(not_a_bulk_asynchronous_executor());
  test_with_non_void_predecessor(not_a_bulk_continuation_executor());
  test_with_non_void_predecessor(complete_bulk_executor());

  std::cout << "OK" << std::endl;

  return 0;
}
<file_sep>#pragma once
#include <agency/cuda/execution/executor/grid_executor.hpp>
#include <agency/cuda/detail/concurrency/block_barrier.hpp>
#include <agency/execution/executor/executor_traits.hpp>
#include <agency/tuple.hpp>
#include <agency/detail/invoke.hpp>
#include <agency/detail/type_traits.hpp>
namespace agency
{
namespace cuda
{
namespace detail
{
// Adapter which lets block_executor reuse grid_executor's bulk_then_execute():
// grid_executor passes a two-level (block, thread) index and an outer shared
// parameter (an unused agency::detail::unit); this functor strips both and
// forwards only the thread-level index and inner shared argument to f_.
template<class Function>
struct block_executor_helper_functor
{
  // mutable so a non-const Function can be invoked from the const operator()s below
  mutable Function f_;

  // this is the form of operator() for bulk_then_execute() with a non-void predecessor future
  template<class Predecessor, class Result, class InnerSharedArg>
  __device__
  void operator()(grid_executor::index_type idx, Predecessor& predecessor, Result& result, agency::detail::unit, InnerSharedArg& inner_shared_arg) const
  {
    // get<1>(idx) selects the intra-block (thread) index; the unit outer shared arg is dropped
    agency::detail::invoke(f_, agency::get<1>(idx), predecessor, result, inner_shared_arg);
  }

  // this is the form of operator() for bulk_then_execute() with a void predecessor future
  template<class Result, class InnerSharedArg>
  __device__
  void operator()(grid_executor::index_type idx, Result& result, agency::detail::unit, InnerSharedArg& inner_shared_arg) const
  {
    agency::detail::invoke(f_, agency::get<1>(idx), result, inner_shared_arg);
  }
};
} // end detail
// block_executor creates concurrent execution agents corresponding to the
// threads of a single CUDA thread block. It is implemented by restricting a
// grid_executor to a grid of exactly one block.
class block_executor : private grid_executor
{
  private:
    using super_t = grid_executor;

  public:
    // agents created by this executor may synchronize with one another (via barrier_type)
    using execution_category = concurrent_execution_tag;

    // shape/index cover only the inner (thread) dimension of grid_executor's (block, thread) pair
    using shape_type = std::tuple_element<1, executor_shape_t<super_t>>::type;
    using index_type = std::tuple_element<1, executor_index_t<super_t>>::type;

    template<class T>
    using future = typename super_t::template future<T>;

    template<class T>
    using allocator = typename super_t::template allocator<T>;

    // the barrier shared by the agents of a block
    using barrier_type = detail::block_barrier;

    // inherit grid_executor's constructors and selected members
    using super_t::super_t;
    using super_t::make_ready_future;
    using super_t::device;

    // Returns the maximum number of threads this executor can create in a
    // single block (the inner component of grid_executor's maximum shape).
    __host__ __device__
    shape_type max_shape_dimensions() const
    {
      return super_t::max_shape_dimensions()[1];
    }

    template<class Function, class T, class ResultFactory, class SharedFactory,
             class = agency::detail::result_of_continuation_t<
               Function,
               index_type,
               async_future<T>,
               agency::detail::result_of_t<ResultFactory()>&,
               agency::detail::result_of_t<SharedFactory()>&
             >
            >
    async_future<agency::detail::result_of_t<ResultFactory()>>
      bulk_then_execute(Function f, shape_type shape, async_future<T>& predecessor, ResultFactory result_factory, SharedFactory shared_factory)
    {
      // wrap f with a functor which accepts indices which grid_executor produces
      auto wrapped_f = detail::block_executor_helper_functor<Function>{f};

      // call grid_executor's .bulk_then_execute() with a single block of the requested
      // shape; the outer shared parameter is a throwaway unit
      return super_t::bulk_then_execute(wrapped_f, super_t::shape_type{1,shape}, predecessor, result_factory, agency::detail::unit_factory(), shared_factory);
    }
};
} // end cuda
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/tuple.hpp>
#include <agency/detail/unit.hpp>
#include <agency/detail/type_traits.hpp>
#include <agency/detail/integer_sequence.hpp>
#include <utility>
#include <type_traits>
namespace agency
{
namespace detail
{
// construct is a type of Factory
// which creates a T by calling T's constructor with the given Args...
// The constructor arguments are stored in a tuple; operator() is overloaded
// on value category so that a construct invoked as an rvalue may move its
// stored arguments into T's constructor.
template<class T, class... Args>
class construct
{
  public:
    __AGENCY_ANNOTATION
    construct() : args_() {}

    __AGENCY_ANNOTATION
    construct(const tuple<Args...>& args)
      : args_(args)
    {}

    // lvalue path: copy the stored arguments into T's constructor
    __agency_exec_check_disable__
    template<size_t... Indices>
    __AGENCY_ANNOTATION
    T impl(index_sequence<Indices...>) const &
    {
      return T(agency::get<Indices>(args_)...);
    }

    // rvalue path: move the stored arguments into T's constructor
    __agency_exec_check_disable__
    template<size_t... Indices>
    __AGENCY_ANNOTATION
    T impl(index_sequence<Indices...>) &&
    {
      return T(agency::get<Indices>(std::move(args_))...);
    }

    __AGENCY_ANNOTATION
    T operator()() const &
    {
      return impl(make_index_sequence<sizeof...(Args)>());
    }

    __AGENCY_ANNOTATION
    T operator()() &&
    {
      return std::move(*this).impl(make_index_sequence<sizeof...(Args)>());
    }

  private:
    // the arguments to pass to T's constructor
    tuple<Args...> args_;
};
// Factory function: packages args... into a construct<T,...> which will build
// a T from decayed copies of the arguments when invoked.
template<class T, class... Args>
__AGENCY_ANNOTATION
construct<T,typename std::decay<Args>::type...> make_construct(Args&&... args)
{
  using result_type = construct<T,typename std::decay<Args>::type...>;
  return result_type(agency::make_tuple(std::forward<Args>(args)...));
}
// Convenience factory: returns a construct<T,T> which copy-constructs a T
// from the stored copy of arg each time it is invoked.
template<class T>
__AGENCY_ANNOTATION
construct<T,T> make_copy_construct(const T& arg)
{
  return construct<T,T>(agency::make_tuple(arg));
}
// a factory which produces agency::detail::unit, used where a shared parameter slot must be filled but carries no data
struct unit_factory : construct<unit> {};
// a moving_factory is a factory which moves an object when it is called
// NOTE: both its "copy" constructor and operator() actually move from the
// source (value_ is mutable), so a moving_factory may be invoked at most once;
// afterwards it holds a moved-from value.
template<class T>
class moving_factory
{
  public:
    __AGENCY_ANNOTATION
    moving_factory(moving_factory&& other) = default;

    // this constructor moves other's value into value_
    // so, it acts like a move constructor
    __AGENCY_ANNOTATION
    moving_factory(const moving_factory& other)
      : value_(std::move(other.value_))
    {}

    // XXX this code causes nvcc 8.0 to produce an error message
    //
    //__agency_exec_check_disable__
    //template<class U,
    //         class = typename std::enable_if<
    //           std::is_constructible<T,U&&>::value
    //         >::type>
    //__AGENCY_ANNOTATION
    //moving_factory(U&& value)
    //  : value_(std::forward<U>(value))
    //{}

    // XXX in order to WAR the nvcc 8.0 error above,
    //     instead of perfectly forwarding the value in,
    //     move construct it into value_ instead.
    __agency_exec_check_disable__
    __AGENCY_ANNOTATION
    moving_factory(T&& value)
      : value_(std::move(value))
    {}

    // moves the stored value out to the caller (single-use; see class note)
    __AGENCY_ANNOTATION
    T operator()() const
    {
      return std::move(value_);
    }

  private:
    // mutable so the const copy constructor and const operator() may move from it
    mutable T value_;
};
// Factory function: wraps value in a moving_factory which will move it out
// when invoked.
template<class T>
__AGENCY_ANNOTATION
moving_factory<decay_t<T>> make_moving_factory(T&& value)
{
  using factory_type = moving_factory<decay_t<T>>;
  return factory_type(std::forward<T>(value));
}
// a zip_factory is a type of Factory which takes a list of Factories
// and creates a tuple whose elements are the results of the given Factories
template<class... Factories>
struct zip_factory
{
  // the factories to invoke, one per resulting tuple element
  tuple<Factories...> factory_tuple_;

  __AGENCY_ANNOTATION
  zip_factory(const tuple<Factories...>& factories) : factory_tuple_(factories) {}

  // invokes each stored factory and collects the results into a tuple
  template<size_t... Indices>
  __AGENCY_ANNOTATION
  agency::tuple<
    result_of_t<Factories()>...
  >
    impl(agency::detail::index_sequence<Indices...>)
  {
    return agency::make_tuple(agency::get<Indices>(factory_tuple_)()...);
  }

  __AGENCY_ANNOTATION
  agency::tuple<
    result_of_t<Factories()>...
  >
    operator()()
  {
    return impl(index_sequence_for<Factories...>());
  }
};
// Factory function: wraps a tuple of factories in a zip_factory.
template<class... Factories>
__AGENCY_ANNOTATION
zip_factory<Factories...> make_zip_factory(const tuple<Factories...>& factory_tuple)
{
  using result_type = zip_factory<Factories...>;
  return result_type(factory_tuple);
}
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/type_traits.hpp>
#include <agency/execution/executor/executor_traits/is_synchronous_executor.hpp>
#include <agency/execution/executor/executor_traits/is_asynchronous_executor.hpp>
#include <agency/execution/executor/executor_traits/is_continuation_executor.hpp>
namespace agency
{
// trait: T is a "simple" (i.e., non-bulk) executor if it supports at least one
// of the three single-agent execution styles
template<class T>
using is_simple_executor = agency::detail::disjunction<
  is_synchronous_executor<T>,
  is_asynchronous_executor<T>,
  is_continuation_executor<T>
>;
namespace detail
{
// a fake Concept to use with __AGENCY_REQUIRES
template<class T>
constexpr bool SimpleExecutor()
{
return is_simple_executor<T>();
}
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <utility>
#include <iterator>
namespace agency
{
namespace experimental
{
namespace detail
{
// counting_iterator adapts an incrementable value (typically an integer) into
// a random access iterator: dereferencing yields the current value by value,
// and arithmetic on the iterator is arithmetic on the value. No underlying
// storage is ever touched.
template<class Incrementable>
class counting_iterator
{
  public:
    using value_type = Incrementable;
    // dereferencing returns a value, not a reference into storage
    using reference = value_type;
    // there is no object to point at, so no pointer type
    using pointer = void;
    using difference_type = decltype(std::declval<Incrementable>() - std::declval<Incrementable>());

    // XXX this should check whether Incrementable is_integral
    //     otherwise it should defer to iterator_traits
    using iterator_category = std::random_access_iterator_tag;

    __AGENCY_ANNOTATION
    counting_iterator() = default;

    __AGENCY_ANNOTATION
    counting_iterator(const counting_iterator&) = default;

    __AGENCY_ANNOTATION
    counting_iterator(const Incrementable& value)
      : value_(value)
    {}

    // pre-increment
    __AGENCY_ANNOTATION
    counting_iterator& operator++()
    {
      ++value_;
      return *this;
    }

    // post-increment
    __AGENCY_ANNOTATION
    counting_iterator operator++(int)
    {
      counting_iterator result = *this;
      ++value_;
      return result;
    }

    // pre-decrement
    __AGENCY_ANNOTATION
    counting_iterator& operator--()
    {
      --value_;
      return *this;
    }

    // post-decrement
    __AGENCY_ANNOTATION
    counting_iterator operator--(int)
    {
      counting_iterator result = *this;
      --value_;
      return result;
    }

    // plus-assign
    __AGENCY_ANNOTATION
    counting_iterator& operator+=(difference_type n)
    {
      value_ += n;
      return *this;
    }

    // plus
    __AGENCY_ANNOTATION
    counting_iterator operator+(difference_type n) const
    {
      counting_iterator result = *this;
      result += n;
      return result;
    }

    // minus-assign
    __AGENCY_ANNOTATION
    counting_iterator& operator-=(difference_type n)
    {
      value_ -= n;
      return *this;
    }

    // minus
    __AGENCY_ANNOTATION
    counting_iterator operator-(difference_type n) const
    {
      counting_iterator result = *this;
      result -= n;
      return result;
    }

    // iterator difference
    __AGENCY_ANNOTATION
    difference_type operator-(const counting_iterator& rhs) const
    {
      return value_ - rhs.value_;
    }

    // dereference: yields the current counter value by value
    __AGENCY_ANNOTATION
    reference operator*() const
    {
      return value_;
    }

    // bracket: yields the value i positions past the current one
    __AGENCY_ANNOTATION
    reference operator[](difference_type i) const
    {
      auto tmp = *this;
      tmp += i;
      return *tmp;
    }

    // less
    __AGENCY_ANNOTATION
    bool operator<(const counting_iterator& other) const
    {
      return value_ < other.value_;
    }

    // less equal
    __AGENCY_ANNOTATION
    bool operator<=(const counting_iterator& other) const
    {
      return value_ <= other.value_;
    }

    // greater
    __AGENCY_ANNOTATION
    bool operator>(const counting_iterator& other) const
    {
      return value_ > other.value_;
    }

    // greater equal
    __AGENCY_ANNOTATION
    bool operator>=(const counting_iterator& other) const
    {
      return value_ >= other.value_;
    }

    // equal
    __AGENCY_ANNOTATION
    bool operator==(const counting_iterator& other) const
    {
      return value_ == other.value_;
    }

    // not equal
    __AGENCY_ANNOTATION
    bool operator!=(const counting_iterator& other) const
    {
      return value_ != other.value_;
    }

  private:
    // the current counter value
    Incrementable value_;
};
} // end detail
// iota_view is a lazy range of the values [begin, end): no storage is
// allocated, and iteration simply counts from begin toward end via
// counting_iterator.
template<class Incrementable>
class iota_view
{
  public:
    using iterator = detail::counting_iterator<Incrementable>;
    using difference_type = typename std::iterator_traits<iterator>::difference_type;

    __AGENCY_ANNOTATION
    iota_view(Incrementable begin, Incrementable end)
      : begin_(begin),
        end_(end)
    {}

    __AGENCY_ANNOTATION
    iterator begin() const
    {
      return begin_;
    }

    __AGENCY_ANNOTATION
    iterator end() const
    {
      return end_;
    }

    // number of values in the range
    __AGENCY_ANNOTATION
    difference_type size() const
    {
      return end() - begin();
    }

    __AGENCY_ANNOTATION
    bool empty() const
    {
      return size() == 0;
    }

    // returns the i-th value of the sequence, i.e. begin + i
    __AGENCY_ANNOTATION
    typename std::iterator_traits<iterator>::reference operator[](difference_type i) const
    {
      return begin()[i];
    }

  private:
    iterator begin_;
    iterator end_;
};
// Creates an iota_view of the values [begin, end). The end bound may be any
// type convertible to Incrementable; it is converted when the view is built.
template<class Incrementable, class OtherIncrementable,
         __AGENCY_REQUIRES(
           std::is_convertible<
             OtherIncrementable, Incrementable
           >::value
         )>
__AGENCY_ANNOTATION
iota_view<Incrementable> iota(Incrementable begin, OtherIncrementable end)
{
  iota_view<Incrementable> result(begin, end);
  return result;
}
// all() on an iota_view is the identity: the view is already non-owning,
// so a copy of it is the view of all its elements.
template<class Incrementable>
__AGENCY_ANNOTATION
iota_view<Incrementable> all(const iota_view<Incrementable>& v)
{
  iota_view<Incrementable> result = v;
  return result;
}
} // end experimental
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/default_shape.hpp>
#include <agency/execution/executor/executor_traits/is_bulk_executor.hpp>
#include <agency/execution/executor/executor_traits/detail/member_shape_type_or.hpp>
#include <cstddef>
namespace agency
{
namespace detail
{
// primary template: when BulkExecutor is not actually a bulk executor,
// no ::type member is defined (SFINAE-friendly)
template<class BulkExecutor, bool Enable = is_bulk_executor<BulkExecutor>::value>
struct executor_shape_impl
{
};

// specialization for genuine bulk executors: use the executor's nested
// shape_type if it has one, otherwise a 1-dimensional default shape
template<class BulkExecutor>
struct executor_shape_impl<BulkExecutor,true>
{
  using type = member_shape_type_or_t<BulkExecutor,default_shape_t<1>>;
};
} // end detail
// public trait: executor_shape<E>::type is E's shape type (defined only when E is a bulk executor)
template<class BulkExecutor>
struct executor_shape : detail::executor_shape_impl<BulkExecutor> {};

// convenience alias for executor_shape<E>::type
template<class BulkExecutor>
using executor_shape_t = typename executor_shape<BulkExecutor>::type;
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/type_traits.hpp>
namespace agency
{
namespace detail
{
// trait: yields T::execution_category if that nested type exists,
// otherwise yields Default
template<class T, class Default>
struct member_execution_category_or
{
  // detector for the nested type
  template<class U>
  using helper = typename U::execution_category;

  using type = detected_or_t<Default, helper, T>;
};

template<class T, class Default>
using member_execution_category_or_t = typename member_execution_category_or<T,Default>::type;
} // end detail
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/execution/executor/executor_traits/is_bulk_executor.hpp>
#include <agency/execution/executor/executor_traits/detail/executor_execution_depth_or.hpp>
namespace agency
{
namespace detail
{
// primary template: left undefined when BulkExecutor is not a bulk executor
template<class BulkExecutor, bool Enable = is_bulk_executor<BulkExecutor>::value>
struct executor_execution_depth_impl;

// specialization for genuine bulk executors: inherit the depth computation
template<class BulkExecutor>
struct executor_execution_depth_impl<BulkExecutor,true>
  : executor_execution_depth_or<BulkExecutor>
{};
} // end detail
// public trait: the number of nested execution levels (shared parameter levels) a bulk executor creates
template<class BulkExecutor>
struct executor_execution_depth : detail::executor_execution_depth_impl<BulkExecutor> {};
} // end agency
<file_sep>#include <agency/detail/config.hpp>
#include <agency/future/future_traits/future_rebind_value.hpp>
#include <agency/future/future_traits/future_then_result.hpp>
#include <agency/future/future_traits/future_value.hpp>
#include <agency/future/future_traits/is_future.hpp>
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/execution/executor/executor_traits.hpp>
#include <agency/detail/integer_sequence.hpp>
#include <agency/tuple.hpp>
#include <agency/execution/executor/customization_points/future_cast.hpp>
#include <agency/execution/executor/detail/utility/bulk_then_execute_with_void_result.hpp>
#include <agency/execution/executor/detail/utility/bulk_then_execute_with_collected_result.hpp>
#include <agency/detail/control_structures/executor_functions/bind_agent_local_parameters.hpp>
#include <agency/detail/control_structures/executor_functions/bulk_async_executor.hpp>
#include <agency/detail/control_structures/executor_functions/result_factory.hpp>
#include <agency/detail/control_structures/scope_result.hpp>
#include <agency/detail/control_structures/decay_parameter.hpp>
#include <agency/detail/type_traits.hpp>
#include <type_traits>
namespace agency
{
namespace detail
{
// this overload handles the general case where the user function returns a normal result
// TupleIndices... index into shared_factory_tuple, one factory per execution level
template<class E, class Function, class ResultFactory, class Future, class Tuple, size_t... TupleIndices>
__AGENCY_ANNOTATION
executor_future_t<E, result_of_t<ResultFactory()>>
  bulk_then_executor_impl(E& exec,
                          Function f,
                          ResultFactory result_factory,
                          executor_shape_t<E> shape,
                          Future& predecessor,
                          Tuple&& shared_factory_tuple,
                          detail::index_sequence<TupleIndices...>)
{
  // expand the tuple of shared parameter factories into individual arguments
  return detail::bulk_then_execute_with_collected_result(exec, f, shape, predecessor, result_factory, agency::get<TupleIndices>(std::forward<Tuple>(shared_factory_tuple))...);
}
// this overload handles the special case where the user function returns a scope_result
// the reason this case cannot be handled by the overload above is that, unlike the above case,
// there is an intermediate future which must be converted to the right type of result future
template<class E, class Function, size_t scope, class T, class Future, class Tuple, size_t... TupleIndices>
__AGENCY_ANNOTATION
executor_future_t<E, typename detail::scope_result_container<scope,T,E>::result_type>
  bulk_then_executor_impl(E& exec,
                          Function f,
                          construct<detail::scope_result_container<scope,T,E>, executor_shape_t<E>> result_factory,
                          executor_shape_t<E> shape,
                          Future& predecessor,
                          Tuple&& shared_factory_tuple,
                          detail::index_sequence<TupleIndices...>)
{
  // the intermediate future carries a scope_result_container, not the user-visible result type
  auto intermediate_future = bulk_then_execute_with_collected_result(exec, f, shape, predecessor, result_factory, agency::get<TupleIndices>(std::forward<Tuple>(shared_factory_tuple))...);

  using result_type = typename detail::scope_result_container<scope,T,E>::result_type;

  // cast the intermediate_future to result_type
  return agency::future_cast<result_type>(exec, intermediate_future);
}
// this overload handles the special case where the user function returns void
// (selected when the result factory is a void_factory); no result container is created
template<class E, class Function, class Future, class Tuple, size_t... TupleIndices>
__AGENCY_ANNOTATION
executor_future_t<E, void>
  bulk_then_executor_impl(E& exec,
                          Function f,
                          void_factory,
                          executor_shape_t<E> shape,
                          Future& predecessor,
                          Tuple&& shared_factory_tuple,
                          detail::index_sequence<TupleIndices...>)
{
  return detail::bulk_then_execute_with_void_result(exec, f, shape, predecessor, agency::get<TupleIndices>(std::forward<Tuple>(shared_factory_tuple))...);
}
// XXX upon c++14, just return auto from these functions

// Functor which sits between the executor and the user's function: the
// executor passes shared parameters in its own packaged form; this functor
// unpacks them into a tuple and forwards them to f as individual arguments.
// The two operator() overloads are selected by whether Future is void.
template<class Result, class Future, class Function>
struct unpack_shared_parameters_from_then_execute_and_invoke
{
  mutable Function f;

  // this overload of impl() handles the case when the future given to then_execute() is non-void
  template<size_t... TupleIndices, class Index, class PastArg, class Tuple>
  __AGENCY_ANNOTATION
  Result impl(detail::index_sequence<TupleIndices...>, const Index& idx, PastArg& past_arg, Tuple&& tuple_of_shared_args) const
  {
    return f(idx, past_arg, agency::get<TupleIndices>(tuple_of_shared_args)...);
  }

  // this overload of impl() handles the case when the future given to then_execute() is void
  template<size_t... TupleIndices, class Index, class Tuple>
  __AGENCY_ANNOTATION
  Result impl(detail::index_sequence<TupleIndices...>, const Index& idx, Tuple&& tuple_of_shared_args) const
  {
    return f(idx, agency::get<TupleIndices>(tuple_of_shared_args)...);
  }

  // this overload of operator() handles the case when the future given to then_execute() is non-void
  template<class Index, class PastArg, class... Types,
           class Future1 = Future,
           class = typename std::enable_if<
             is_non_void_future<Future1>::value
           >::type>
  __AGENCY_ANNOTATION
  Result operator()(const Index& idx, PastArg& past_arg, Types&... packaged_shared_args) const
  {
    // unpack the packaged shared parameters into a tuple
    auto tuple_of_shared_args = detail::unpack_shared_parameters_from_executor(packaged_shared_args...);

    return impl(detail::make_tuple_indices(tuple_of_shared_args), idx, past_arg, tuple_of_shared_args);
  }

  // this overload of operator() handles the case when the future given to then_execute() is void
  template<class Index, class... Types,
           class Future1 = Future,
           class = typename std::enable_if<
             is_void_future<Future1>::value
           >::type>
  __AGENCY_ANNOTATION
  Result operator()(const Index& idx, Types&... packaged_shared_args) const
  {
    // unpack the packaged shared parameters into a tuple
    auto tuple_of_shared_args = detail::unpack_shared_parameters_from_executor(packaged_shared_args...);

    return impl(detail::make_tuple_indices(tuple_of_shared_args), idx, tuple_of_shared_args);
  }
};
// Factory function deducing Function; Result and Future must be supplied explicitly.
template<class Result, class Future, class Function>
__AGENCY_ANNOTATION
unpack_shared_parameters_from_then_execute_and_invoke<Result, Future, Function> make_unpack_shared_parameters_from_then_execute_and_invoke(Function f)
{
  using result_type = unpack_shared_parameters_from_then_execute_and_invoke<Result, Future, Function>;
  return result_type{f};
}
// computes the result type of bulk_then(executor)
// by reusing the machinery of bulk_async_executor_result
template<class Executor, class Function, class Future, class... Args>
struct bulk_then_executor_result
{
  // figure out the Future's value_type
  using future_value_type = typename future_traits<Future>::value_type;

  // assemble a list of template parameters for bulk_async_executor_result
  // when Future is a void future, we don't want to include it in the list
  using template_parameters = typename std::conditional<
    is_void_future<Future>::value,
    type_list<Executor,Function,Args...>,
    type_list<Executor,Function,Future,Args...>
  >::type;

  // to compute the result of bulk_then_executor(), instantiate
  // bulk_async_executor_result_t with the list of template parameters
  using type = type_list_instantiate<bulk_async_executor_result_t, template_parameters>;
};

// convenience alias for bulk_then_executor_result<...>::type
template<class Executor, class Function, class Future, class... Args>
using bulk_then_executor_result_t = typename bulk_then_executor_result<Executor,Function,Future,Args...>::type;
// binds the agent-local arguments of args... to f, reserving leading
// placeholder slots for the executor index and (non-void) predecessor value
template<class Future,
         class Function,
         class... Args,
         class = typename std::enable_if<
           is_non_void_future<Future>::value
         >::type>
__AGENCY_ANNOTATION
auto bind_agent_local_parameters_for_bulk_then(Function f, Args&&... args) ->
  decltype(detail::bind_agent_local_parameters_workaround_nvbug1754712(std::integral_constant<size_t,2>(), f, detail::placeholders::_1, detail::placeholders::_2, std::forward<Args>(args)...))
{
  // the _1 is for the executor idx parameter, which is the first parameter passed to f
  // the _2 is for the future parameter, which is the second parameter passed to f
  // the agent local parameters begin at index 2
  return detail::bind_agent_local_parameters_workaround_nvbug1754712(std::integral_constant<size_t,2>(), f, detail::placeholders::_1, detail::placeholders::_2, std::forward<Args>(args)...);
}
// overload for a void predecessor future: only the executor index slot is reserved
template<class Future,
         class Function,
         class... Args,
         class = typename std::enable_if<
           is_void_future<Future>::value
         >::type>
__AGENCY_ANNOTATION
auto bind_agent_local_parameters_for_bulk_then(Function f, Args&&... args) ->
  decltype(detail::bind_agent_local_parameters_workaround_nvbug1754712(std::integral_constant<size_t,1>(), f, detail::placeholders::_1, std::forward<Args>(args)...))
{
  // the _1 is for the executor idx parameter, which is the first parameter passed to f
  // the Future is void, so we don't have to reserve a parameter slot for its (non-existent) value
  // the agent local parameters begin at index 1
  return detail::bind_agent_local_parameters_workaround_nvbug1754712(std::integral_constant<size_t,1>(), f, detail::placeholders::_1, std::forward<Args>(args)...);
}
// The executor-level entry point for bulk_then(): separates args... into
// agent-local and shared parameters, wires them up for the executor, and
// dispatches to the appropriate bulk_then_executor_impl overload based on
// the type of f's result.
template<class Executor, class Function, class Future, class... Args>
__AGENCY_ANNOTATION
bulk_then_executor_result_t<Executor,Function,Future,Args...>
  bulk_then_executor(Executor& exec, executor_shape_t<Executor> shape, Function f, Future& fut, Args&&... args)
{
  // bind f and the agent local parameters in args... into a functor g
  auto g = detail::bind_agent_local_parameters_for_bulk_then<Future>(f, std::forward<Args>(args)...);

  // make a tuple of the shared args
  auto shared_arg_tuple = detail::forward_shared_parameters_as_tuple(std::forward<Args>(args)...);

  // package up the shared parameters for the executor
  const size_t execution_depth = executor_execution_depth<Executor>::value;

  // create a tuple of factories to use for shared parameters for the executor
  auto factory_tuple = agency::detail::make_shared_parameter_factory_tuple<execution_depth>(shared_arg_tuple);

  // compute the type of f's result
  using result_of_f = detail::result_of_continuation_t<Function,executor_index_t<Executor>,Future,decay_parameter_t<Args>...>;

  // unpack shared parameters we receive from the executor
  auto h = detail::make_unpack_shared_parameters_from_then_execute_and_invoke<result_of_f,Future>(g);

  // based on the type of f's result, make a factory that will create the appropriate type of container to store f's results
  auto result_factory = detail::make_result_factory<result_of_f>(exec, shape);

  return detail::bulk_then_executor_impl(exec, h, result_factory, shape, fut, factory_tuple, detail::make_index_sequence<execution_depth>());
}
} // end detail
} // end agency
<file_sep>/// \file
/// \brief Include this file to use bulk_async().
///
#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/control_structures/bulk_async_execution_policy.hpp>
#include <agency/detail/type_traits.hpp>
#include <agency/detail/integer_sequence.hpp>
#include <agency/detail/control_structures/is_bulk_call_possible_via_execution_policy.hpp>
#include <agency/execution/execution_agent.hpp>
namespace agency
{
namespace detail
{
// SFINAE helper: defines ::type (the result of bulk_async) only when the
// bulk call is actually possible via the given execution policy
template<bool enable, class ExecutionPolicy, class Function, class... Args>
struct enable_if_bulk_async_execution_policy_impl {};

template<class ExecutionPolicy, class Function, class... Args>
struct enable_if_bulk_async_execution_policy_impl<true, ExecutionPolicy, Function, Args...>
{
  using type = bulk_async_execution_policy_result_t<ExecutionPolicy,Function,Args...>;
};

template<class ExecutionPolicy, class Function, class... Args>
struct enable_if_bulk_async_execution_policy
  : enable_if_bulk_async_execution_policy_impl<
      is_bulk_call_possible_via_execution_policy<decay_t<ExecutionPolicy>,Function,Args...>::value,
      decay_t<ExecutionPolicy>,
      Function,
      Args...
    >
{};
} // end detail
/// \brief Creates a bulk asynchronous invocation.
/// \ingroup control_structures
///
///
/// `bulk_async` is a control structure which asynchronously creates a group of function invocations with forward progress ordering as required by an execution policy.
/// The results of these invocations, if any, are collected into a container and returned as `bulk_async`'s asynchronous result.
/// A future object corresponding to the eventual availability of this container is returned as `bulk_async`'s result.
///
/// `bulk_async` asynchronously creates a group of function invocations of size `N`, and each invocation i in `[0,N)` has the following form:
///
/// result_i = f(agent_i, arg_i_1, arg_i_2, ..., arg_i_M)
///
/// `agent_i` is a reference to an **execution agent** which identifies the ith invocation within the group.
/// The parameter `arg_i_j` depends on the `M` arguments `arg_j` passed to `bulk_async`:
/// * If `arg_j` is a **shared parameter**, then it is a reference to an object shared among all execution agents in `agent_i`'s group.
/// * Otherwise, `arg_i_j` is a copy of argument `arg_j`.
///
/// If the invocations of `f` do not return `void`, these results are collected and returned in a container `results`, whose type is implementation-defined.
/// If invocation i returns `result_i`, and this invocation's `agent_i` has index `idx_i`, then `results[idx_i]` yields `result_i`.
///
/// \param policy An execution policy describing the requirements of the execution agents created by this call to `bulk_async`.
/// \param f A function defining the work to be performed by execution agents.
/// \param args Additional arguments to pass to `f` when it is invoked.
/// \return A `void` future object, if `f` has no result; otherwise, a future object corresponding to the eventually available container of `f`'s results indexed by the execution agent which produced them.
/// \note The type of future object returned by `bulk_async` is a property of the type of `ExecutionPolicy` used as a parameter.
///
/// \tparam ExecutionPolicy This type must fulfill the requirements of `ExecutionPolicy`.
/// \tparam Function `Function`'s first parameter type must be `ExecutionPolicy::execution_agent_type&`.
/// The types of its additional parameters must match `Args...`.
/// \tparam Args Each type in `Args...` must match the type of the corresponding parameter of `Function`.
///
/// The following example demonstrates how to use `bulk_async` to create tasks which execute asynchronously with the caller.
///
/// \include hello_async.cpp
///
/// Messages from the agents in the two asynchronous tasks are printed while the main thread sleeps:
///
/// ~~~~
/// $ clang -std=c++11 -I. -lstdc++ -pthread examples/hello_async.cpp -o hello_async
/// $ ./hello_async
/// Starting two tasks asynchronously...
/// Sleeping before waiting on the tasks...
/// Hello, world from agent 0 in task 1
/// Hello, world from agent 1 in task 1
/// Hello, world from agent 2 in task 1
/// Hello, world from agent 3 in task 1
/// Hello, world from agent 4 in task 1
/// Hello, world from agent 0 in task 2
/// Hello, world from agent 1 in task 2
/// Hello, world from agent 2 in task 2
/// Hello, world from agent 3 in task 2
/// Hello, world from agent 4 in task 2
/// Woke up, waiting for the tasks to complete...
/// OK
/// ~~~~
///
/// \see bulk_invoke
/// \see bulk_then
template<class ExecutionPolicy, class Function, class... Args>
__AGENCY_ANNOTATION
#ifndef DOXYGEN_SHOULD_SKIP_THIS
// SFINAE: this overload participates only when the bulk call is possible via the policy
typename detail::enable_if_bulk_async_execution_policy<
  ExecutionPolicy, Function, Args...
>::type
#else
see_below
#endif
  bulk_async(ExecutionPolicy&& policy, Function f, Args&&... args)
{
  // the agent type's execution category determines how many levels of shared parameters exist
  using agent_traits = execution_agent_traits<typename std::decay<ExecutionPolicy>::type::execution_agent_type>;
  const size_t num_shared_params = detail::execution_depth<typename agent_traits::execution_category>::value;

  return detail::bulk_async_execution_policy(detail::index_sequence_for<Args...>(), detail::make_index_sequence<num_shared_params>(), policy, f, std::forward<Args>(args)...);
}
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/memory/detail/resource/malloc_resource.hpp>
#include <agency/memory/allocator/detail/allocator_traits.hpp>
#include <agency/memory/allocator/detail/allocator_adaptor.hpp>
#include <agency/cuda/memory/resource/heterogeneous_resource.hpp>
#include <agency/cuda/memory/resource/managed_resource.hpp>
#include <memory>
namespace agency
{
namespace cuda
{
// heterogeneous_allocator uses a different primitive resource depending on whether
// its operations are called from __host__ or __device__ code.
// All allocation logic lives in the heterogeneous_resource/allocator_adaptor bases;
// this class only supplies constructors (including the rebinding converting constructor
// required of allocators).
template<class T, class HostResource = managed_resource, class DeviceResource = agency::detail::malloc_resource>
class heterogeneous_allocator : public agency::detail::allocator_adaptor<T,heterogeneous_resource<HostResource,DeviceResource>>
{
  private:
    using super_t = agency::detail::allocator_adaptor<T,heterogeneous_resource<HostResource,DeviceResource>>;

  public:
    __host__ __device__
    heterogeneous_allocator() = default;

    __host__ __device__
    heterogeneous_allocator(const heterogeneous_allocator&) = default;

    // converting constructor so this allocator can be rebound to a different value type U
    template<class U>
    __host__ __device__
    heterogeneous_allocator(const heterogeneous_allocator<U,HostResource,DeviceResource>& other)
      : super_t(other)
    {}
}; // end heterogeneous_allocator
} // end cuda
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/requires.hpp>
#include <agency/detail/algorithm/copy_n.hpp>
#include <agency/execution/execution_policy.hpp>
#include <agency/detail/iterator/iterator_traits.hpp>
#include <agency/detail/type_traits.hpp>
#include <agency/tuple.hpp>
#include <agency/cuda/future.hpp>
#include <agency/cuda/detail/terminate.hpp>
#include <iterator>
namespace agency
{
namespace cuda
{
namespace detail
{
namespace copy_n_detail
{
// trait: true when both iterators have the same value_type and that value_type
// is trivially copyable, i.e. the elements may be copied with a raw cudaMemcpy
template<class Iterator1, class Iterator2>
using iterator_values_are_trivially_copyable = agency::detail::conjunction<
  std::is_same<typename std::iterator_traits<Iterator1>::value_type, typename std::iterator_traits<Iterator2>::value_type>,
  agency::detail::iterator_value_is_trivially_copyable<Iterator1>
>;
// this is the implementation of copy_n for contiguous, trivially copyable types:
// the elements are copied in bulk with a single cudaMemcpyAsync
//
// Returns a tuple of (first + n, result + n), i.e. one-past-the-end of each range.
// Throws via detail::throw_on_error if any CUDA call fails.
template<class ExecutionPolicy, class RandomAccessIterator1, class Size, class RandomAccessIterator2,
         __AGENCY_REQUIRES(
           iterator_values_are_trivially_copyable<RandomAccessIterator1,RandomAccessIterator2>::value
         )>
agency::tuple<RandomAccessIterator1,RandomAccessIterator2> copy_n(ExecutionPolicy&& policy, RandomAccessIterator1 first, Size n, RandomAccessIterator2 result)
{
  cudaStream_t stream = experimental::make_dependent_stream(cuda::make_ready_async_future());

  using value_type = typename std::iterator_traits<RandomAccessIterator1>::value_type;

  // the iterators are assumed contiguous, so the addresses of the first
  // elements are the addresses of the whole ranges
  const value_type* source = &*first;
  value_type* dest = &*result;

  try
  {
    // note our use of cudaMemcpyAsync avoids synchronizing the entire system, unlike cudaMemcpy
    detail::throw_on_error(cudaMemcpyAsync(dest, source, n * sizeof(value_type), cudaMemcpyDefault, stream), "cuda::copy_n(): After cudaMemcpyAsync()");

    detail::throw_on_error(cudaStreamSynchronize(stream), "cuda::copy_n(): After cudaStreamSynchronize()");
  }
  catch(...)
  {
    // destroy the stream before propagating the exception
    // (previously the stream leaked on this path -- best effort, ignore the destroy's own status)
    cudaStreamDestroy(stream);
    throw;
  }

  detail::throw_on_error(cudaStreamDestroy(stream), "cuda::copy_n(): After cudaStreamDestroy()");

  return agency::make_tuple(first + n, result + n);
}
// this is the implementation of copy_n for iterators which are not contiguous nor trivially copyable:
// fall back to the generic element-by-element implementation
template<class ExecutionPolicy, class RandomAccessIterator1, class Size, class RandomAccessIterator2,
         __AGENCY_REQUIRES(
           !iterator_values_are_trivially_copyable<RandomAccessIterator1,RandomAccessIterator2>::value
         )>
agency::tuple<RandomAccessIterator1,RandomAccessIterator2> copy_n(ExecutionPolicy&& policy, RandomAccessIterator1 first, Size n, RandomAccessIterator2 result)
{
  return agency::detail::default_copy_n(std::forward<ExecutionPolicy>(policy), first, n, result);
}
} // end copy_n_detail
} // end detail
template<class ExecutionPolicy, class InputIterator, class Size, class OutputIterator>
agency::tuple<InputIterator,OutputIterator> copy_n(ExecutionPolicy&& policy, InputIterator first, Size n, OutputIterator result)
{
return detail::copy_n_detail::copy_n(std::forward<ExecutionPolicy>(policy), first, n, result);
}
} // end cuda
} // end agency
<file_sep>#pragma once
#include <agency/detail/config.hpp>
#include <agency/detail/integer_sequence.hpp>
#include <agency/detail/type_traits.hpp>
#include <agency/tuple.hpp>
#include <functional>
#include <type_traits>
#include <utility>
#include <tuple>
namespace agency
{
namespace detail
{
namespace bind_detail
{
__agency_exec_check_disable__
template<typename F, typename Tuple, size_t... I>
__AGENCY_ANNOTATION
auto apply_impl(F&& f, Tuple&& t, agency::detail::index_sequence<I...>)
-> decltype(
std::forward<F>(f)(
agency::get<I>(std::forward<Tuple>(t))...
)
)
{
return std::forward<F>(f)(
agency::get<I>(std::forward<Tuple>(t))...
);
}
template<typename F, typename Tuple>
__AGENCY_ANNOTATION
auto apply(F&& f, Tuple&& t)
-> decltype(
apply_impl(
std::forward<F>(f),
std::forward<Tuple>(t),
agency::detail::make_index_sequence<std::tuple_size<decay_t<Tuple>>::value>()
)
)
{
using Indices = agency::detail::make_index_sequence<std::tuple_size<decay_t<Tuple>>::value>;
return apply_impl(
std::forward<F>(f),
std::forward<Tuple>(t),
Indices()
);
}
template<class ArgTuple, class BoundArg,
class = typename std::enable_if<
(std::is_placeholder<decay_t<BoundArg>>::value == 0)
>::type>
__AGENCY_ANNOTATION
auto substitute_arg(ArgTuple&&, BoundArg&& bound_arg)
-> decltype(
std::forward<BoundArg>(bound_arg)
)
{
return std::forward<BoundArg>(bound_arg);
}
template<class ArgTuple, class BoundArg,
class = typename std::enable_if<
(std::is_placeholder<BoundArg>::value > 0)
>::type>
__AGENCY_ANNOTATION
auto substitute_arg(ArgTuple&& arg_tuple, const BoundArg&)
-> decltype(
agency::get<
static_cast<size_t>(std::is_placeholder<BoundArg>::value) - 1
>(std::forward<ArgTuple>(arg_tuple))
)
{
return agency::get<
static_cast<size_t>(std::is_placeholder<BoundArg>::value) - 1
>(std::forward<ArgTuple>(arg_tuple));
}
template<class ArgTuple, class BoundArgTuple, size_t... I>
__AGENCY_ANNOTATION
auto substitute_impl(ArgTuple&& arg_tuple, BoundArgTuple&& bound_arg_tuple, agency::detail::index_sequence<I...>)
-> decltype(
agency::forward_as_tuple(
substitute_arg(
std::forward<ArgTuple>(arg_tuple),
agency::get<I>(std::forward<BoundArgTuple>(bound_arg_tuple))
)...
)
)
{
return agency::forward_as_tuple(
substitute_arg(
std::forward<ArgTuple>(arg_tuple),
agency::get<I>(std::forward<BoundArgTuple>(bound_arg_tuple))
)...
);
}
template<class ArgTuple, class BoundArgTuple>
__AGENCY_ANNOTATION
auto substitute(ArgTuple&& arg_tuple, BoundArgTuple&& bound_arg_tuple)
-> decltype(
substitute_impl(
std::forward<ArgTuple>(arg_tuple),
std::forward<BoundArgTuple>(bound_arg_tuple),
agency::detail::make_index_sequence<std::tuple_size<decay_t<BoundArgTuple>>::value>()
)
)
{
using Indices = agency::detail::make_index_sequence<std::tuple_size<decay_t<BoundArgTuple>>::value>;
return substitute_impl(std::forward<ArgTuple>(arg_tuple), std::forward<BoundArgTuple>(bound_arg_tuple), Indices());
}
template<class F, class... BoundArgs>
class bind_expression
{
private:
F fun_;
tuple<BoundArgs...> bound_args_;
public:
__AGENCY_ANNOTATION
bind_expression(const F& f, const BoundArgs&... bound_args)
: fun_(f),
bound_args_(bound_args...)
{}
template<class... OtherArgs>
__AGENCY_ANNOTATION
auto operator()(OtherArgs&&... args)
-> decltype(
apply(
fun_,
substitute(
agency::forward_as_tuple(std::forward<OtherArgs>(args)...),
bound_args_
)
)
)
{
return apply(
fun_,
substitute(
agency::forward_as_tuple(std::forward<OtherArgs>(args)...),
bound_args_
)
);
}
template<class... OtherArgs>
__AGENCY_ANNOTATION
auto operator()(OtherArgs&&... args) const
-> decltype(
apply(
fun_,
substitute(
agency::forward_as_tuple(std::forward<OtherArgs>(args)...),
bound_args_
)
)
)
{
return apply(
fun_,
substitute(
agency::forward_as_tuple(std::forward<OtherArgs>(args)...),
bound_args_
)
);
}
};
} // end bind_detail
template<class F, class... BoundArgs>
__AGENCY_ANNOTATION
bind_detail::bind_expression<decay_t<F>, decay_t<BoundArgs>...>
bind(F&& f, BoundArgs&&... bound_args)
{
using namespace bind_detail;
return bind_expression<decay_t<F>,decay_t<BoundArgs>...>(std::forward<F>(f), std::forward<BoundArgs>(bound_args)...);
}
template<int I>
struct placeholder {};
namespace placeholders
{
#ifndef __CUDA_ARCH__
constexpr placeholder<0> _1{};
#else
static const __device__ placeholder<0> _1{};
#endif
#ifndef __CUDA_ARCH__
constexpr placeholder<1> _2{};
#else
static const __device__ placeholder<1> _2{};
#endif
#ifndef __CUDA_ARCH__
constexpr placeholder<2> _3{};
#else
static const __device__ placeholder<2> _3{};
#endif
#ifndef __CUDA_ARCH__
constexpr placeholder<3> _4{};
#else
static const __device__ placeholder<3> _4{};
#endif
#ifndef __CUDA_ARCH__
constexpr placeholder<4> _5{};
#else
static const __device__ placeholder<4> _5{};
#endif
#ifndef __CUDA_ARCH__
constexpr placeholder<5> _6{};
#else
static const __device__ placeholder<5> _6{};
#endif
#ifndef __CUDA_ARCH__
constexpr placeholder<6> _7{};
#else
static const __device__ placeholder<6> _7{};
#endif
#ifndef __CUDA_ARCH__
constexpr placeholder<7> _8{};
#else
static const __device__ placeholder<7> _8{};
#endif
#ifndef __CUDA_ARCH__
constexpr placeholder<8> _9{};
#else
static const __device__ placeholder<8> _9{};
#endif
#ifndef __CUDA_ARCH__
constexpr placeholder<9> _10{};
#else
static const __device__ placeholder<9> _10{};
#endif
} // end placeholders
} // end detail
} // end agency
namespace std
{
// XXX not sure we require this specialization since we don't actually use std::bind() for anything
template<int I>
struct is_placeholder<agency::detail::placeholder<I>> : std::integral_constant<int,I+1> {};
} // end std
<file_sep>#include <iostream>
#include <type_traits>
#include <vector>
#include <cassert>
#include <agency/execution/executor/executor_array.hpp>
#include <agency/tuple.hpp>
#include "test_executors.hpp"
template<class OuterExecutor, class InnerExecutor>
void test(OuterExecutor, InnerExecutor inner_exec)
{
using namespace agency;
using executor_array_type = agency::executor_array<InnerExecutor,OuterExecutor>;
static_assert(is_bulk_continuation_executor<executor_array_type>::value,
"executor_array should be a bulk continuation executor");
using expected_category = scoped_execution_tag<executor_execution_category_t<OuterExecutor>, executor_execution_category_t<InnerExecutor>>;
static_assert(detail::is_detected_exact<expected_category, executor_execution_category_t, executor_array_type>::value,
"scoped_executor should have expected_category execution_category");
static_assert(detail::is_detected_exact<tuple<size_t,size_t>, executor_shape_t, executor_array_type>::value,
"executor_array should have detail::tuple<size_t,size_t> shape_type");
static_assert(detail::is_detected_exact<detail::index_tuple<size_t,size_t>, executor_index_t, executor_array_type>::value,
"executor_array should have detail::index_tuple<size_t,size_t> index_type");
static_assert(detail::is_detected_exact<executor_future_t<OuterExecutor,int>, executor_future_t, executor_array_type, int>::value,
"executor_array should have the same future type as OuterExecutor");
static_assert(detail::is_detected_exact<executor_allocator_t<OuterExecutor,int>, executor_allocator_t, executor_array_type, int>::value,
"executor_array should have the same allocator type as OuterExecutor");
executor_array_type exec(10, inner_exec);
using shape_type = executor_shape_t<executor_array_type>;
using index_type = executor_index_t<executor_array_type>;
using result_type = agency::experimental::basic_ndarray<int, shape_type, agency::executor_allocator_t<executor_array_type, int>>;
{
// test .bulk_then_execute() with non-void predecessor
shape_type shape(10,10);
std::future<int> predecessor_fut = make_ready_future<int>(exec, 7);
auto f = exec.bulk_then_execute(
[=](index_type idx, int& predecessor, result_type& results, std::vector<int>& outer_shared_arg, std::vector<int>& inner_shared_arg)
{
auto outer_idx = agency::get<0>(idx);
auto inner_idx = agency::get<1>(idx);
results[idx] = predecessor + outer_shared_arg[outer_idx] + inner_shared_arg[inner_idx];
},
shape,
predecessor_fut,
[=]{ return result_type(shape); }, // results
[=]{ return std::vector<int>(agency::get<0>(shape), 13); }, // outer_shared_arg
[=]{ return std::vector<int>(agency::get<1>(shape), 42); } // inner_shared_arg
);
auto result = f.get();
assert(result_type(shape, 7 + 13 + 42) == result);
}
{
// test .bulk_then_execute() with void predecessor
shape_type shape(10,10);
std::future<void> predecessor_fut = make_ready_future<void>(exec);
auto f = exec.bulk_then_execute(
[=](index_type idx, result_type& results, std::vector<int>& outer_shared_arg, std::vector<int>& inner_shared_arg)
{
auto outer_idx = agency::get<0>(idx);
auto inner_idx = agency::get<1>(idx);
results[idx] = outer_shared_arg[outer_idx] + inner_shared_arg[inner_idx];
},
shape,
predecessor_fut,
[=]{ return result_type(shape); }, // results
[=]{ return std::vector<int>(agency::get<0>(shape), 13); }, // outer_shared_arg
[=]{ return std::vector<int>(agency::get<1>(shape), 42); } // inner_shared_arg
);
auto result = f.get();
assert(result_type(shape, 13 + 42) == result);
}
}
int main()
{
test(bulk_continuation_executor(), bulk_continuation_executor());
test(bulk_continuation_executor(), bulk_synchronous_executor());
test(bulk_continuation_executor(), bulk_asynchronous_executor());
test(bulk_synchronous_executor(), bulk_continuation_executor());
test(bulk_synchronous_executor(), bulk_synchronous_executor());
test(bulk_synchronous_executor(), bulk_asynchronous_executor());
test(bulk_asynchronous_executor(), bulk_continuation_executor());
test(bulk_asynchronous_executor(), bulk_synchronous_executor());
test(bulk_asynchronous_executor(), bulk_asynchronous_executor());
std::cout << "OK" << std::endl;
return 0;
}
| 6f9b2a506aab60d5a4e722c055d5fc9cf202fbec | [
"Markdown",
"Python",
"C++"
] | 178 | C++ | brycelelbach/agency | aef6649ea4e8cce334dd47c346d42b3c9eaf1222 | b0427a3f695b03990803edc3707004ae809e9c23 |
refs/heads/master | <file_sep>test_that("normalizeDistanceEffect behaves as expected", {
data(exampleHiCDOCDataSet)
object <- reduceHiCDOCDataSet(exampleHiCDOCDataSet, chromosomes = c("X"))
object <- filterSparseReplicates(object)
object <- filterWeakPositions(object)
# Apply normalization
set.seed(123)
expect_message(norm <- normalizeDistanceEffect(object))
# Filtering 0 values before normalisation
expect_equal(length(norm), length(object))
matAssay <- SummarizedExperiment::assay(norm)
expect_equal(sum(!is.na(matAssay)), 35105)
expect_equal(round(colSums(matAssay, na.rm=TRUE),3),
c(-3004.957, -1585.685, 0, 0, 1164.882, 1328.834, 2160.781))
})
<file_sep>#### chromosomes ####
#' Retrieves the vector of chromosome names.
#' @rdname HiCDOCDataSet-methods
#' @export
setMethod("chromosomes", "HiCDOCDataSet", function(object) {
    # Plain slot accessor: the chromosome names stored at construction time.
    return(slot(object, "chromosomes"))
})
#### sampleConditions ####
#' Retrieves the vector of condition names, one for each sample.
#' @rdname HiCDOCDataSet-methods
#' @export
setMethod("sampleConditions", "HiCDOCDataSet", function(object) {
    # One condition per sample, taken from the column data.
    conditions <- object$condition
    return(conditions)
})
#### sampleReplicates ####
#' Retrieves the vector of replicate names, one for each sample.
#' @rdname HiCDOCDataSet-methods
#' @export
setMethod("sampleReplicates", "HiCDOCDataSet", function(object) {
    # One replicate name per sample, taken from the column data.
    replicates <- object$replicate
    return(replicates)
})
#### compartments ####
#' Retrieves a \code{GenomicRange} of the compartment of every position.
#' By default only positions of chromosomes passing all the quality checks
#' are returned; set \code{passChecks = FALSE} to get every position.
#' @rdname HiCDOCDataSet-methods
#' @export
setMethod("compartments", "HiCDOCDataSet", function(object, passChecks = TRUE) {
    if (passChecks == TRUE) {
        compartments <- object@compartments
        # Keep positions whose chromosome passed the three quality checks.
        # Bug fix: the original tested assignment.check twice and never
        # PC1.check, unlike concordances() which filters on all three
        # (centroid.check, PC1.check, assignment.check).
        compartments[compartments$assignment.check == TRUE &
                        compartments$centroid.check == TRUE &
                        compartments$PC1.check == TRUE]
    } else {
        object@compartments
    }
})
#### differences ####
#' Retrieves a \code{GenomicRange} of the significant compartment differences
#' @rdname HiCDOCDataSet-methods
#' @export
setMethod("differences", "HiCDOCDataSet", function(object, threshold = NULL) {
    # detectCompartments() has not been run yet: nothing to return.
    if (is.null(object@differences)) {
        return(NULL)
    }
    # When provided, the threshold must be a single numeric value.
    thresholdIsValid <- is.null(threshold) ||
        (is.numeric(threshold) && length(threshold) <= 1)
    if (!thresholdIsValid) {
        stop("'threshold' must be a number.", call. = FALSE)
    }
    selected <- object@differences
    if (!is.null(threshold)) {
        # Keep only the differences below the significance threshold.
        selected <- selected[selected$pvalue.adjusted <= threshold]
    }
    if (length(selected) > 0) {
        return(selected)
    }
    # Nothing passed the filter: inform the user and return NULL.
    if (is.null(threshold)) {
        message("No differences found.")
    } else {
        message(
            "No differences found with adjusted p-value <= ",
            threshold,
            "."
        )
    }
    return(NULL)
})
#### concordances ####
#' Retrieves a \code{GenomicRange} of the concordance (confidence in assigned
#' compartment) of every position in every replicate. By default, only
#' chromosomes passing all the quality checks are returned.
#' @rdname HiCDOCDataSet-methods
#' @export
setMethod("concordances", "HiCDOCDataSet", function(object, passChecks = TRUE) {
    if(passChecks == TRUE){
        concordances <- object@concordances
        # Names of the chromosomes passing all three quality checks
        # (data.table filter on the 'checks' slot, returning 'chromosome').
        passingChecks <- object@checks[centroid.check == TRUE &
            PC1.check == TRUE &
            assignment.check == TRUE, chromosome]
        # Keep only the concordances located on those chromosomes.
        concordances[concordances@seqnames %in% passingChecks]
    } else {
        # Return every concordance, including failing chromosomes.
        object@concordances
    }
})
#### show ####
#' Prints a human-readable summary of a \code{\link{HiCDOCDataSet}}:
#' its input files, chromosomes, replicates, parameters, and the
#' accessor methods available on the object.
#' @rdname HiCDOCDataSet-methods
#' @export
setMethod("show", "HiCDOCDataSet", function(object) {
    cat(
        "Object of class 'HiCDOCDataSet'\n\n",
        # One indented line per input entry.
        "- Inputs:\n",
        paste0(
            "  ",
            vapply(
                object@input,
                function(x) paste0(x),
                # vapply template: one string per path of the first input
                # entry, or a single string when there is no input at all.
                character(ifelse(
                    length(object@input) > 0,
                    length(object@input[[1]]),
                    1
                ))
            ),
            "\n"
        ),
        "\n",
        # Comma-separated chromosome names, or "None".
        "- Chromosomes:\n  ",
        if (
            is.null(object@chromosomes) ||
            length(object@chromosomes) == 0
        ) {
            "None"
        } else {
            paste(object@chromosomes, collapse = ", ")
        },
        "\n\n",
        # One "condition x, replicate y" line per sample, or "None".
        "- Replicates:\n",
        if (
            is.null(sampleReplicates(object)) ||
            length(sampleReplicates(object)) == 0
        ) {
            "  None\n"
        } else {
            paste0(
                "  condition ",
                sampleConditions(object),
                ", replicate ",
                sampleReplicates(object),
                "\n"
            )
        },
        "\n",
        # One "name = value" line per parameter.
        "- Parameters:\n",
        paste0(
            "  ",
            vapply(
                seq_along(parameters(object)),
                function(x) {
                    paste(
                        names(parameters(object))[x],
                        '=',
                        parameters(object)[x]
                    )
                },
                character(1)
            ),
            "\n"
        ),
        "\n",
        # Static reminder of the public accessors.
        "- Methods:\n",
        "  chromosomes(object)\n",
        "  sampleConditions(object)\n",
        "  sampleReplicates(object)\n",
        "  compartments(object)\n",
        "  differences(object)\n",
        "  concordances(object)\n",
        "  parameters(object)\n",
        "  parameters(object) <- list()\n\n",
        sep = ""
    )
})
#### parameters ####
#' Access the parameters of a \code{\link{HiCDOCDataSet}}.
#' @rdname HiCDOCDataSet-parameters
#' @export
setMethod("parameters", "HiCDOCDataSet", function(object) {
    # Plain slot accessor for the analysis parameters.
    return(slot(object, "parameters"))
})
#### parameters<- ####
#' Change the parameters of a \code{\link{HiCDOCDataSet}}.
#' The provided named list is fully validated (list type, no duplicated
#' names, only known parameter names) before any parameter is updated.
#' @rdname HiCDOCDataSet-parameters
#' @export
setReplaceMethod("parameters", "HiCDOCDataSet", function(object, value) {
    defaultParameterNames <- names(defaultHiCDOCParameters)
    # The replacement value must be a named list.
    if (!is(value, "list")) {
        stop(
            "'parameters' must be a named list.\n",
            "No parameters were updated. ",
            "See 'help(parameters)' for details.",
            call. = FALSE
        )
    }
    parameterNames <- names(value)
    # Reject duplicated parameter names.
    duplicatedParameterNames <- unique(
        parameterNames[duplicated(parameterNames)]
    )
    if (length(duplicatedParameterNames) > 0) {
        stop(
            "Duplicate parameter",
            # Plural "s" only when several duplicates were found.
            if (length(duplicatedParameterNames) != 1) "s",
            " provided: ",
            paste(duplicatedParameterNames, collapse = ", "),
            "\nNo parameters were updated. ",
            "See 'help(parameters)' for details.",
            call. = FALSE
        )
    }
    # Reject names that are not known default parameter names.
    invalidParameterNames <- parameterNames[
        !(parameterNames %in% defaultParameterNames)
    ]
    if (length(invalidParameterNames) > 0) {
        stop(
            "Invalid parameter",
            if (length(invalidParameterNames) != 1) "s",
            " provided: ",
            paste(invalidParameterNames, collapse = ", "),
            "\nNo parameters were updated. ",
            "See 'help(parameters)' for details.",
            call. = FALSE
        )
    }
    # All checks passed: update only the provided parameters.
    object@parameters[parameterNames] <- value
    return(object)
})
<file_sep>#' .modeVector Extract the mode of vector.
#' .modeVector Extract the mode of a vector.
#'
#' @param x A vector
#'
#' @return The most frequent value of \code{x}; ties are broken in favour
#' of the value appearing first in \code{x}.
#'
#' @examples
#' .modeVector(c(1, 2, 2, 2, 4))
#' @noRd
.modeVector <- function(x) {
    distinct <- unique(x)
    # Count the occurrences of each distinct value, in order of appearance.
    occurrences <- tabulate(match(x, distinct))
    distinct[which.max(occurrences)]
}
#' @description
#' Determines the number of bins per chromosome.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#'
#' @return
#' A named vector of the number of bins per chromosome (names are the
#' chromosome names).
#'
#' @keywords internal
#' @noRd
.determineChromosomeSizes <- function(object) {
    tabChromosomes <- as.data.table(InteractionSet::regions(object))
    # Smallest region index of each chromosome.
    tabChromosomes[, minIndex := min(index), by = .(seqnames)]
    # minStart to correct chromosomes not starting at the 0 position
    tabChromosomes[
        index == minIndex,
        minStart := round(start / width),
        by = .(seqnames)
    ]
    # Computing chromosome entire size: index span plus the leading offset.
    tabChromosomes <- tabChromosomes[
        ,
        .(binSize = max(index) - min(index) + 1 + max(minStart, na.rm = TRUE)),
        by = .(seqnames)
    ]
    totalBins <- tabChromosomes$binSize
    names(totalBins) <- tabChromosomes$seqnames
    return(totalBins)
}
#' @description
#' Determines the valid (not empty) samples of each chromosome.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#'
#' @return
#' A list with one element per chromosome, each holding the indices of the
#' assay columns (samples) whose interactions have a positive variance.
#'
#' @keywords internal
#' @noRd
.determineValids <- function(object) {
    # Split the assay rows by chromosome.
    valids <- S4Vectors::split(
        SummarizedExperiment::assay(object),
        S4Vectors::mcols(object)$chromosome,
        drop = FALSE
    )
    # Per-chromosome variance of each sample (column).
    valids <- lapply(valids, function(x)
        apply(x, 2, stats::var, na.rm=TRUE))
    # A sample is valid for a chromosome if its variance is positive.
    valids <- lapply(valids, function(x) which(x>0 & !is.na(x)))
    return(valids)
}
#' @description
#' Fills parameters and slots describing the data. Called by a
#' \code{\link{HiCDOCDataSet}} constructor.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#'
#' @return
#' A filled \code{\link{HiCDOCDataSet}} ready for analysis.
#'
#' @keywords internal
#' @noRd
.fillHiCDOCDataSet <- function(object) {
    # Reduce the levels in interaction part
    object <- InteractionSet::reduceRegions(object)
    objectRegions <- InteractionSet::regions(object)
    # Chromosome names, in natural (mixed alphanumeric) order.
    chromosomeNames <- unique(as.character(
        GenomeInfoDb::seqnames(objectRegions)
    ))
    chromosomeNames <- gtools::mixedsort(chromosomeNames)
    GenomeInfoDb::seqlevels(
        InteractionSet::regions(object),
        pruning.mode = "coarse"
    ) <- chromosomeNames
    # Add chromosome column for split purpose
    chromosomes <-
        GenomeInfoDb::seqnames(InteractionSet::anchors(object, "first"))
    chromosomes <- S4Vectors::Rle(factor(chromosomes, levels = chromosomeNames))
    S4Vectors::mcols(object) <- S4Vectors::DataFrame("chromosome" = chromosomes)
    # Sorting interactions and assay
    # Order by chromosome, then by first and second anchor ids.
    ids <- InteractionSet::anchors(object, id = TRUE)
    neworder <- order(chromosomes, ids$first, ids$second)
    object <- object[neworder, ]
    # Fill all other slots than interactionSet part
    # Chromosomes and their size (max bin)
    object@chromosomes <- chromosomeNames
    object@totalBins <- .determineChromosomeSizes(object)
    object@parameters <- defaultHiCDOCParameters
    # Valid conditions and replicates by chromosome (==not empty)
    # maybe do a function for valid conditions and replicates ?
    valids <- .determineValids(object)
    object@validAssay <- valids
    # Weakbins: one (initially empty) entry per chromosome.
    object@weakBins <- vector("list", length(object@chromosomes))
    names(object@weakBins) <- object@chromosomes
    return(object)
}
test_that("HiCDOCDataSetFromHic works as expected", {
    # Path to the example .hic file shipped with the package.
    hicFile <- system.file("extdata", "liver_18_10M.hic", package = "HiCDOC")
    # A single replicate of a single condition; names could be used
    # instead of numbers.
    expect_message(
        dataSet <- HiCDOCDataSetFromHiC(
            hicFile,
            replicates = 1,
            conditions = 1,
            binSize = 500000
        ),
        "liver_18_10M.hic"
    )
    # Expected content of the example file at 500 kb resolution.
    expect_equal(length(dataSet), 210)
    expect_identical(dataSet@chromosomes, "18")
    expect_identical(dataSet$condition, 1)
    expect_identical(dataSet$replicate, 1)
})
<file_sep>#' @title
#' @title
#' Plot the distribution of concordance differences.
#'
#' @description
#' Plots the distribution of concordance differences, which are the differences
#' between concordances of each pair of replicates from different conditions. A
#' concordance can be understood as a confidence in a genomic position's
#' assigned compartment. Mathematically, it is the log ratio of a genomic
#' position's distance to each compartment's centroid, normalized by the
#' distance between both centroids, and min-maxed to a [-1,1] interval.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#' @return
#' A \code{ggplot}.
#'
#' @examples
#' data(exampleHiCDOCDataSetProcessed)
#' plotConcordanceDifferences(exampleHiCDOCDataSetProcessed)
#'
#' @export
plotConcordanceDifferences <- function(object) {
    .validateSlots(object, slots = c("comparisons"))
    # Bug fix: work on a copy. data.table's `:=` modifies the table by
    # reference, so adding the 'changed' column directly to
    # object@comparisons would silently alter the caller's object.
    differences <- data.table::copy(object@comparisons)
    # Flag the pairs whose assigned compartment changed between conditions.
    differences[, changed := data.table::fifelse(
        compartment.1 == compartment.2,
        "FALSE",
        "TRUE"
    )]
    plot <- ggplot(
        differences,
        aes(x = difference, fill = changed)
    ) + geom_histogram() + labs(
        x = "Concordance",
        title = "Distribution of concordance differences",
        fill = "Change\nof\ncompartment"
    )
    return(plot)
}
<file_sep>#' @description
#' Returns parameters, updated with their default values if invalid.
#'
#' Three repairs are applied, each with a warning: non-numeric (or
#' non-scalar) values are replaced by their defaults, unknown parameter
#' names are removed, and missing parameters are filled in with defaults.
#'
#' @param objectParameters
#' A list of parameters.
#'
#' @return
#' A list of valid parameters.
#'
#' @keywords internal
#' @noRd
.validateParameters <- function(objectParameters) {
    defaultParameterNames <- names(defaultHiCDOCParameters)
    inputParameterNames <- names(objectParameters)
    # A parameter is usable only if it is a single numeric value.
    numericParameters <- vapply(
        objectParameters,
        function(parameter)
            is.numeric(parameter) && length(parameter) == 1,
        FUN.VALUE = TRUE
    )
    # 1) Replace non-numeric parameters with their defaults.
    if (!all(numericParameters)) {
        notNumericParameters <- inputParameterNames[!numericParameters]
        warning(
            "Non-numeric parameter",
            # Singular/plural agreement of the warning message.
            if (length(notNumericParameters) != 1) "s were" else " was",
            " replaced with ",
            if (length(notNumericParameters) != 1) "their" else "its",
            " default",
            if (length(notNumericParameters) != 1) "s",
            ":\n",
            paste0(
                notNumericParameters,
                ":",
                " ",
                objectParameters[notNumericParameters],
                " -> ",
                defaultHiCDOCParameters[notNumericParameters],
                collapse = "\n"
            ),
            call. = FALSE
        )
        objectParameters[notNumericParameters] <- defaultHiCDOCParameters[
            notNumericParameters
        ]
    }
    # 2) Drop parameters whose name is not a known default parameter name.
    known <- inputParameterNames %in% defaultParameterNames
    if (!all(known)) {
        unknownParameterNames <- inputParameterNames[!known]
        warning(
            "Unknown parameter",
            if (length(unknownParameterNames) != 1) "s were" else " was",
            " removed:\n",
            paste(unknownParameterNames, collapse = "\n"),
            call. = FALSE
        )
        objectParameters[unknownParameterNames] <- NULL
    }
    # 3) Fill in missing parameters with their default values.
    present <- defaultParameterNames %in% inputParameterNames
    if (!all(present)) {
        missingParameterNames <- defaultParameterNames[!present]
        warning(
            "Missing parameter",
            if (length(missingParameterNames) != 1) "s were" else " was",
            " replaced with ",
            if (length(missingParameterNames) != 1) "their" else "its",
            " default",
            if (length(missingParameterNames) != 1) "s",
            ":\n",
            paste0(
                missingParameterNames,
                ":",
                defaultHiCDOCParameters[missingParameterNames],
                collapse = "\n"
            ),
            call. = FALSE
        )
        objectParameters[missingParameterNames] <- defaultHiCDOCParameters[
            missingParameterNames
        ]
    }
    return(objectParameters)
}
#' @description
#' Returns valid chromosome, condition, or replicate names, from given names
#' or ids.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#'
#' @param names
#' One or several names or ids to look up.
#'
#' @param category
#' The category in which to look up the names or ids. One of "chromosomes",
#' "conditions", "replicates". Defaults to "chromosomes".
#'
#' @return
#' Valid names. Raises an error listing the unknown entries otherwise.
#'
#' @keywords internal
#' @noRd
.validateNames <-
    function(object, names, category = "chromosomes") {
        # Candidate names for the requested category.
        candidates <- switch(
            category,
            "chromosomes" = unique(object@chromosomes),
            "replicates" = unique(object$replicate),
            "conditions" = unique(object$condition)
        )
        # Names that are already valid are returned unchanged.
        if (all(names %in% candidates)) {
            return(names)
        }
        # Numeric ids are translated to the corresponding names.
        if (is.numeric(names) && all(names %in% seq_along(candidates))) {
            return(candidates[names])
        }
        # Anything else is an error listing the unknown entries.
        unknown <- names[!(names %in% candidates)]
        stop(
            "Unknown ",
            substr(category, 1, nchar(category) - 1),
            if (length(unknown) != 1) "s",
            ": ",
            paste(unknown, collapse = ", "),
            call. = FALSE
        )
    }
#' @description
#' Checks that the provided object is a \code{\link{HiCDOCDataSet}}, and that
#' the provided slots are in that object.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#'
#' @param slots
#' The names of slots to verify. Defaults to NULL.
#'
#' @return
#' Raises an error if the object is not a \code{\link{HiCDOCDataSet}}, or if one
#' of the slots is missing. The error message suggests the step to run first
#' when a well-known slot is missing.
#'
#' @keywords internal
#' @noRd
.validateSlots <- function(object, slots = NULL) {
    if (!is(object, "HiCDOCDataSet")) {
        stop("The provided object is not a 'HiCDOCDataSet'.", call. = FALSE)
    }
    # 'interactions' is checked apart: it must exist, be non-NULL,
    # and contain at least one row.
    if (
        "interactions" %in% slots && (
            !.hasSlot(object, "interactions") ||
            is.null(slot(object, "interactions")) ||
            nrow(object) == 0
        )
    ) {
        stop("No interactions found in the 'HiCDOCDataSet'.", call. = FALSE)
    }
    if (!is.null(slots)) {
        # Slots actually present (defined and non-NULL) on the object.
        allSlots <- slotNames("HiCDOCDataSet")
        presentSlots <- allSlots[vapply(
            allSlots,
            function(x) .hasSlot(object, x) && !is.null(slot(object, x)),
            FUN.VALUE = TRUE
        )]
        missingSlots <- slots[!(slots %in% presentSlots)]
        # Known slots get a dedicated message pointing to the step
        # that should have filled them.
        if ("binSize" %in% missingSlots) {
            stop(
                "Resolution is unknown.\n",
                "This 'HiCDOCDataSet' wasn't built properly.",
                call. = FALSE
            )
        }
        if ("totalBins" %in% missingSlots) {
            stop(
                "Chromosome lengths are unknown.\n",
                "This 'HiCDOCDataSet' wasn't built properly.",
                call. = FALSE
            )
        }
        if ("validAssay" %in% missingSlots) {
            stop(
                "Cannot process potentially sparse replicates.\n",
                "First, run 'filterSparseReplicates()' on the object.",
                call. = FALSE
            )
        }
        if ("weakBins" %in% missingSlots) {
            stop(
                "Cannot process potentially weak positions.\n",
                "First, run 'filterWeakPositions()' on the object.",
                call. = FALSE
            )
        }
        # All the slots filled by detectCompartments().
        compartmentSlots <- c(
            "compartments",
            "concordances",
            "differences",
            "distances",
            "centroids",
            "selfInteractionRatios"
        )
        if (any(compartmentSlots %in% missingSlots)) {
            stop(
                "No compartments found.\n",
                "First, run 'detectCompartments()' on the object.",
                call. = FALSE
            )
        }
        # Fallback for any other missing slot.
        if (length(missingSlots) > 0) {
            stop(
                "Missing slots: ",
                paste(missingSlots, collapse = ", "),
                call. = FALSE
            )
        }
    }
}
test_that("plotConcordances returns error if no compartments", {
    data(exampleHiCDOCDataSet)
    # detectCompartments() has not been run on the raw dataset.
    expect_error(
        pp <- plotConcordances(exampleHiCDOCDataSet),
        "No compartments found."
    )
})

test_that("plotConcordances behaves as expected", {
    data(exampleHiCDOCDataSetProcessed)
    # The chromosome argument is mandatory and must be known.
    expect_error(
        plotConcordances(exampleHiCDOCDataSetProcessed),
        '"chromosome"'
    )
    expect_error(plotConcordances(exampleHiCDOCDataSetProcessed, 6), "Unknown")
    concordancePlot <- plotConcordances(exampleHiCDOCDataSetProcessed, 1)
    expect_is(concordancePlot, "ggplot")
    # Labels of the produced plot.
    expect_identical(
        concordancePlot$labels[c("title", "caption", "x", "y", "colour")],
        list(
            "title" = "Concordances of chromosome X by condition",
            "caption" = "The grey areas are significant changes (adjusted p-value <= 5%)",
            "x" = "position",
            "y" = "concordance",
            "colour" = "replicate"
        )
    )
    # Layer order: significant areas (rectangles) under the concordance lines.
    expect_is(concordancePlot$layers[[1]]$geom, "GeomRect")
    expect_is(concordancePlot$layers[[2]]$geom, "GeomLine")
    # The plot must also render without error.
    expect_error(print(concordancePlot), NA)
})
<file_sep>#' @title
#' A/B compartment detection and differential analysis
#' @docType package
#' @import methods
#' @import zlibbioc
#' @import ggplot2
#' @import InteractionSet
#' @importFrom GenomicRanges start end distance GRanges match union
#' @importFrom Rcpp evalCpp sourceCpp
#' @importFrom stats loess loess.control ecdf predict median update optimize prcomp p.adjust quantile wilcox.test
#' @importFrom S4Vectors DataFrame mcols split runLength Rle %in%
#' @importFrom SummarizedExperiment assay colData
#' @importFrom gtools mixedsort
#' @importFrom pbapply pbmapply
#' @importFrom BiocParallel bpparam bpmapply bplapply
#' @importFrom BiocGenerics cbind width
#' @importFrom cowplot ggdraw get_legend
#' @importFrom grid textGrob gpar
#' @importFrom gridExtra arrangeGrob
#' @importFrom multiHiCcompare make_hicexp cyclic_loess hic_table
#' @importFrom GenomeInfoDb seqlevels seqnames
#' @import data.table
#' @useDynLib HiCDOC
#' @aliases HiCDOC-package
"_PACKAGE"
<file_sep>#' @title
#' @title
#' Plot compartment changes.
#'
#' @description
#' Plots the predicted compartments, along with their concordance in each
#' replicate, and significant changes between experiment conditions.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#' @param chromosome
#' A chromosome name or index in \code{chromosomes(object)}.
#' @param threshold
#' Significance threshold for the compartment changes. Defaults to 0.05.
#' @param xlim
#' A vector of the minimum and maximum positions to display. If NULL, displays
#' all positions. Defaults to NULL.
#' @param points
#' Whether or not to add points to the concordances. Defaults to FALSE.
#' @param checks
#' Whether or not to add sanity checks messages. Default to TRUE.
#' @param colour
#' Border color for the compartments. Default to `gray90`. `NA` means no border.
#'
#' @return
#' A \code{ggplot}.
#'
#' @examples
#' data(exampleHiCDOCDataSetProcessed)
#' plotCompartmentChanges(exampleHiCDOCDataSetProcessed, chromosome = 1)
#'
#' @export
plotCompartmentChanges <- function(
    object,
    chromosome,
    threshold = 0.05,
    xlim = NULL,
    points = FALSE,
    checks = TRUE,
    colour = "gray90"
) {
    .validateSlots(
        object,
        slots = c(
            "concordances",
            "compartments",
            "differences"
        )
    )
    chromosomeName <- .validateNames(object, chromosome, "chromosomes")
    # Build the two sub-plots that will be stacked vertically.
    concordancesPlot <- plotConcordances(
        object,
        chromosomeName,
        xlim,
        threshold,
        points
    )
    compartmentsPlot <- plotCompartments(
        object,
        chromosomeName,
        xlim,
        colour
    )
    if (is.null(compartmentsPlot) || is.null(concordancesPlot)) {
        return(NULL)
    }
    # Messages for the user
    # The caption is detached from the concordances plot to be drawn
    # as a separate row of the final layout.
    captionConcordances <- concordancesPlot$labels$caption
    concordancesPlot$labels$caption <- NULL
    # Horizontal alignment of the sub-graphs (change width of the plots)
    # Legends and titles are stripped here; they are re-added as
    # dedicated rows below.
    plotsGrobs <- lapply(
        list(
            compartmentsPlot + theme(legend.position = "none",
                plot.margin = unit(c(1,0,0,0), "lines")) +
                labs(title=NULL),
            concordancesPlot + theme(legend.position = "none",
                plot.margin = unit(c(0,0,0,0), "lines")) +
                labs(title=NULL)
        ),
        ggplot2::ggplotGrob
    )
    # Give every grob the widths of the last one so the x axes line up.
    commonWidths <- plotsGrobs[[length(plotsGrobs)]]$widths
    plotsGrobs <- lapply(
        plotsGrobs,
        function(x) {
            x$widths <- commonWidths
            return(x)
        }
    )
    if(checks){
        # Quality-control messages are drawn next to the legends.
        messages <- .messageCheck(object, chromosomeName)
        messages <- paste(messages, collapse = "\n")
        messages <- paste0("Quality controls:\n", messages)
        legendsGrob <- gridExtra::arrangeGrob(
            gridExtra::arrangeGrob(
                cowplot::get_legend(compartmentsPlot),
                cowplot::get_legend(concordancesPlot),
                ncol = 1,
                nrow = 2
            ),
            grid::textGrob(label=messages, x=0.9, y=0.1,
                just=c("right", "bottom"),
                gp=grid::gpar(fontsize=8)),
            ncol = 2,
            nrow = 1,
            padding = unit(1, "cm")
        )
    } else {
        legendsGrob <- gridExtra::arrangeGrob(
            cowplot::get_legend(compartmentsPlot),
            cowplot::get_legend(concordancesPlot),
            ncol = 2,
            nrow = 1,
            padding = unit(1, "cm")
        )
    }
    # Final layout, top-down: compartments, concordances, caption, legends.
    plot <- gridExtra::arrangeGrob(
        plotsGrobs[[1]],
        plotsGrobs[[2]],
        grid::textGrob(label=captionConcordances, x=0.1, y=1,
            just=c("left", "top"),
            gp=grid::gpar(fontsize=8)),
        legendsGrob,
        heights = c(2, 10, 0.5, 2),
        nrow=4,
        ncol=1,
        padding = unit(1, "lines"),
        top = paste0(
            "Compartments and concordances of chromosome ",
            chromosomeName, " by condition"
        )
    )
    return(cowplot::ggdraw(plot))
}
test_that("normalizeBiologicalBiases behaves as expected", {
    data(exampleHiCDOCDataSet)
    # Filtering sparse replicates and weak positions is a prerequisite
    # of the normalization.
    filtered <- filterSparseReplicates(exampleHiCDOCDataSet)
    filtered <- filterWeakPositions(filtered)
    expect_message(
        normalized <- normalizeBiologicalBiases(filtered),
        "Chromosome Z: normalizing biological biases."
    )
    # The number of interactions must be unchanged.
    expect_equal(length(normalized), length(filtered))
    normalizedAssay <- SummarizedExperiment::assay(normalized)
    # Expected values pinned on the example dataset.
    expect_equal(sum(!is.na(normalizedAssay)), 181566)
    expect_equal(
        round(colSums(normalizedAssay, na.rm = TRUE), 2),
        c(179.82, 179.82, 79.29, 220.81, 179.81, 179.81, 280.61)
    )
})
<file_sep>#' @description
#' Computes the euclidean distance between two vectors.
#'
#' @param x
#' A vector.
#' @param y
#' A vector.
#'
#' @return
#' A float number.
#'
#' @keywords internal
#' @noRd
.euclideanDistance <- function(x, y) {
    # Euclidean (L2) distance between two equal-length numeric vectors.
    difference <- x - y
    sqrt(sum(difference * difference))
}
#' @description
#' Computes the log ratio of the distance of a position to each centroid.
#'
#' @param x
#' The vector of a genomic position.
#' @param centroids
#' A list of two vectors.
#'
#' @return
#' A float number.
#'
#' @keywords internal
#' @noRd
.distanceRatio <- function(x, centroids) {
    # Small offset, proportional to the inter-centroid distance, added to
    # both distances to avoid log(0) when x coincides with a centroid.
    offset <- .euclideanDistance(centroids[[1]], centroids[[2]]) * 1e-10
    distanceToFirst <- .euclideanDistance(x, centroids[[1]]) + offset
    distanceToSecond <- .euclideanDistance(x, centroids[[2]]) + offset
    return(log(distanceToFirst / distanceToSecond))
}
#' @description
#' Assigns correct cluster labels by comparing centroids across conditions.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#'
#' @return
#' A \code{\link{HiCDOCDataSet}} with corrected cluster labels in compartments,
#' concordances, distances and centroids.
#'
#' @keywords internal
#' @noRd
.tieCentroids <- function(object) {
    # Chromosomes that still have at least one valid assay.
    validChromosomeNames <- names(
        base::Filter(
            function(x) !is.null(x),
            object@validAssay
        )
    )
    # For each chromosome, the first (after sorting) valid condition serves
    # as the reference whose cluster labels the other conditions must match.
    referenceConditionNames <- vapply(
        validChromosomeNames,
        FUN = function(x) sort(object$condition[object@validAssay[[x]]])[1],
        FUN.VALUE = ""
    )
    # Centroids of clusters 1 and 2 in the reference condition, per chromosome.
    referenceCentroids <- data.table::merge.data.table(
        object@centroids[
            cluster == 1 &
            condition == referenceConditionNames[chromosome],
            .(chromosome, reference.1 = centroid)
        ],
        object@centroids[
            cluster == 2 &
            condition == referenceConditionNames[chromosome],
            .(chromosome, reference.2 = centroid)
        ],
        all = TRUE
    )
    # One row per chromosome x condition, with both centroids side by side.
    clusters <- data.table::merge.data.table(
        object@centroids[
            cluster == 1,
            .(chromosome, condition, centroid.1 = centroid)
        ],
        object@centroids[
            cluster == 2,
            .(chromosome, condition, centroid.2 = centroid)
        ],
        all = TRUE
    )
    clusters <- data.table::merge.data.table(
        clusters,
        referenceCentroids,
        all = TRUE
    )
    # Cross distances between each condition's centroids and the reference
    # centroids of the same chromosome.
    c1_r1 <- mapply(
        function(x, y) .euclideanDistance(unlist(x), unlist(y)),
        clusters$centroid.1,
        clusters$reference.1
    )
    c1_r2 <- mapply(
        function(x, y) .euclideanDistance(unlist(x), unlist(y)),
        clusters$centroid.1,
        clusters$reference.2
    )
    c2_r1 <- mapply(
        function(x, y) .euclideanDistance(unlist(x), unlist(y)),
        clusters$centroid.2,
        clusters$reference.1
    )
    c2_r2 <- mapply(
        function(x, y) .euclideanDistance(unlist(x), unlist(y)),
        clusters$centroid.2,
        clusters$reference.2
    )
    # cluster.1/cluster.2 are the corrected labels: cluster 1 is relabelled
    # as 2 when the product of "matched" distances (c1-r1, c2-r2) is at least
    # the product of "crossed" distances (c1-r2, c2-r1), i.e. when the
    # crossed pairing looks closer to the reference.
    clusters[, cluster.1 := 1 * ((c1_r1 * c2_r2) >= (c1_r2 * c2_r1)) + 1]
    clusters[, cluster.2 := 1 + (cluster.1 == 1)]
    clusters <- clusters[, .(chromosome, condition, cluster.1, cluster.2)]
    # Relabel clusters in the compartments.
    object@compartments <- data.table::merge.data.table(
        object@compartments,
        clusters,
        by = c("chromosome", "condition"),
        all.x = TRUE,
        sort = FALSE
    )
    object@compartments[
        ,
        cluster := ifelse(
            cluster == 1,
            cluster.1,
            cluster.2
        )
    ]
    object@compartments[, `:=`(cluster.1 = NULL, cluster.2 = NULL)]
    # Relabel clusters in the concordances, flipping the concordance sign
    # when a cluster keeps its label (change stays -1 when labels swapped).
    object@concordances <- data.table::merge.data.table(
        object@concordances,
        clusters,
        by = c("chromosome", "condition"),
        all.x = TRUE,
        sort = FALSE
    )
    object@concordances[, change := -1]
    object@concordances[
        cluster == 1 & cluster == cluster.1,
        change := 1
    ]
    object@concordances[
        cluster == 2 & cluster == cluster.2,
        change := 1
    ]
    object@concordances[, concordance := change * concordance]
    object@concordances[, compartment := data.table::fifelse(
        cluster == 1,
        cluster.1,
        cluster.2
    )]
    object@concordances[, `:=`(
        cluster.1 = NULL,
        cluster.2 = NULL,
        change = NULL
    )]
    # Relabel clusters in the distances.
    object@distances <- data.table::merge.data.table(
        object@distances,
        clusters,
        by = c("chromosome", "condition"),
        all.x = TRUE,
        sort = FALSE
    )
    object@distances[, cluster := data.table::fifelse(
        cluster == 1,
        cluster.1,
        cluster.2
    )]
    object@distances[, `:=`(cluster.1 = NULL, cluster.2 = NULL)]
    # Relabel clusters in the centroids.
    object@centroids <- data.table::merge.data.table(
        object@centroids,
        clusters,
        by = c("chromosome", "condition"),
        all.x = TRUE,
        sort = FALSE
    )
    object@centroids[, cluster := data.table::fifelse(
        cluster == 1,
        cluster.1,
        cluster.2
    )]
    object@centroids[, `:=`(cluster.1 = NULL, cluster.2 = NULL)]
    return(object)
}
#' @description
#' Constructs a link matrix of interaction rows to be clustered together:
#' one row per bin, listing the row index of that bin in every replicate.
#'
#' @param totalReplicates
#' The number of replicates.
#'
#' @param totalBins
#' The number of bins.
#'
#' @return
#' A matrix with one row per bin and one column per replicate, holding the
#' 0-based row indices of interactions to be clustered together.
#'
#' @keywords internal
#' @noRd
.constructLinkMatrix <- function(totalReplicates, totalBins) {
    # The row index of bin b in replicate r is r * totalBins + b (0-based).
    binIndices <- seq_len(totalBins) - 1
    replicateOffsets <- (seq_len(totalReplicates) - 1) * totalBins
    return(outer(binIndices, replicateOffsets, `+`))
}
#' @description
#' Segregates positions of a chromosome into two clusters using constrained
#' K-means.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}, already reduced to a single chromosome and
#' a single condition.
#'
#' @return
#' A list of:
#' - The compartment (cluster number) of each position.
#' - The concordance (float) of each genomic position in each replicate.
#' - The distances to centroids (float) of each position in each replicate.
#' - The centroid (vector) of each cluster.
#'
#' @md
#' @keywords internal
#' @noRd
.clusterizeChromosomeCondition <- function(object) {
    # The reduced object holds exactly one chromosome and one condition.
    chromosomeName <- object@chromosomes
    conditionName <- object$condition[1]
    totalBins <- length(InteractionSet::regions(object))
    validAssay <- object@validAssay[[chromosomeName]]
    if (length(validAssay) == 0) {
        return(NULL)
    }
    replicateNames <- object$replicate[validAssay]
    orderedAssay <- validAssay[order(replicateNames)]
    chromosomeInteractionSet <- InteractionSet::InteractionSet(
        SummarizedExperiment::assay(object),
        InteractionSet::interactions(object)
    )
    # Inflate each replicate's interactions into a dense bins x bins matrix.
    matrixAssay <- lapply(
        orderedAssay,
        FUN = function(x) {
            InteractionSet::inflate(
                chromosomeInteractionSet,
                rows = chromosomeName,
                columns = chromosomeName,
                sample = x
            )
        }
    )
    matrixAssay <- lapply(
        matrixAssay,
        function(x) x@matrix
    )
    # Stack replicates vertically: one row per (replicate, bin) pair.
    matrixAssay <- do.call("rbind", matrixAssay)
    matrixAssay[is.na(matrixAssay)] <- 0
    # Constrain each bin's rows (one per replicate) to the same cluster.
    mustLink <- .constructLinkMatrix(length(replicateNames), totalBins)
    clusteringOutput <- constrainedClustering(
        matrixAssay,
        mustLink,
        object@parameters$kMeansDelta,
        object@parameters$kMeansIterations,
        object@parameters$kMeansRestarts
    )
    # TODO: question: why do we only keep the first entries?
    # What is the point of returning them multiple times?
    # N.b. `0:totalBins` indexing: index 0 is silently dropped by R, so this
    # keeps entries 1..totalBins (the first replicate's cluster per bin).
    clusters <- as.integer(clusteringOutput[["clusters"]][0:totalBins] + 1)
    centroids <- clusteringOutput[["centroids"]]
    # Concordance normalization bounds: the distance ratios evaluated at the
    # two centroids themselves.
    min <- .distanceRatio(centroids[[1]], centroids)
    max <- .distanceRatio(centroids[[2]], centroids)
    # Concordance: distance ratio min-maxed to [-1, 1].
    concordances <- apply(
        matrixAssay,
        1,
        function(row) {
            2 * (.distanceRatio(row, centroids) - min) / (max - min) - 1
        }
    )
    # Distance of each (replicate, bin) row to both centroids.
    distances <- apply(
        matrixAssay,
        1,
        function(row) {
            c(
                .euclideanDistance(row, centroids[[1]]),
                .euclideanDistance(row, centroids[[2]])
            )
        }
    )
    indices <- S4Vectors::mcols(InteractionSet::regions(object))$index
    dfCompartments <- data.table::data.table(
        "chromosome" = chromosomeName,
        "index" = indices,
        "condition" = conditionName,
        "cluster" = clusters
    )
    dfConcordances <- data.table::data.table(
        "chromosome" = chromosomeName,
        "index" = rep(indices, length(replicateNames)),
        "condition" = conditionName,
        "replicate" = rep(sort(replicateNames), each = totalBins),
        "cluster" = rep(clusters, length(replicateNames)),
        "concordance" = concordances
    )
    dfDistances <- data.table::data.table(
        "chromosome" = chromosomeName,
        "index" = rep(indices, 2 * length(replicateNames)),
        "condition" = conditionName,
        "replicate" = rep(rep(sort(replicateNames), each = totalBins), 2),
        "cluster" = rep(c(1, 2), each = length(replicateNames) * totalBins),
        "distance" = c(t(distances))
    )
    dfCentroids <- data.table::data.table(
        "chromosome" = chromosomeName,
        "condition" = conditionName,
        "cluster" = c(1, 2),
        "centroid" = centroids
    )
    return(
        list(
            "compartments" = dfCompartments,
            "concordances" = dfConcordances,
            "distances" = dfDistances,
            "centroids" = dfCentroids
        )
    )
}
#' @description
#' Runs the clustering to detect compartments in each chromosome and condition.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#'
#' @param parallel
#' Whether or not to parallelize the processing. Defaults to FALSE.
#'
#' @return
#' A \code{\link{HiCDOCDataSet}} with compartments, concordances, distances
#' and centroids.
#'
#' @keywords internal
#' @noRd
.clusterize <- function(object, parallel = FALSE) {
    # Enumerate every (chromosome, condition) pair with a valid assay.
    chromosomeConditionPairs <- do.call(
        rbind,
        lapply(
            object@chromosomes,
            function(chromosomeName) {
                data.frame(
                    "chromosome" = chromosomeName,
                    "condition" = sort(unique(
                        object$condition[object@validAssay[[chromosomeName]]]
                    ))
                )
            }
        )
    )
    # Reduce the data set to one object per (chromosome, condition) pair.
    reducedObjects <- mapply(
        function(chromosomeName, conditionName) {
            reduceHiCDOCDataSet(
                object,
                chromosomes = chromosomeName,
                conditions = conditionName,
                dropLevels = TRUE
            )
        },
        chromosomeConditionPairs$chromosome,
        chromosomeConditionPairs$condition
    )
    # Cluster each reduced object, possibly in parallel.
    result <- .internalLapply(
        parallel,
        reducedObjects,
        .clusterizeChromosomeCondition
    )
    # Stack each per-pair result component into one table (NULL results,
    # from pairs without valid assays, are dropped by rbindlist).
    extractComponent <- function(componentName) {
        data.table::rbindlist(lapply(result, `[[`, componentName))
    }
    object@compartments <- extractComponent("compartments")
    object@concordances <- extractComponent("concordances")
    object@distances <- extractComponent("distances")
    object@centroids <- extractComponent("centroids")
    return(object)
}
#' @description
#' Computes, for each genomic position in each replicate, its self
#' interaction (diagonal value) along with the sum of its other
#' interactions (off-diagonal values).
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#'
#' @return
#' A data.table with columns chromosome, index, condition, replicate,
#' ratio (the diagonal value) and offDiagonal (the off-diagonal sum).
#'
#' @keywords internal
#' @noRd
.computeSelfInteractionRatios <- function(object) {
    ids <- InteractionSet::anchors(object, id = FALSE)
    # Diagonal interactions are those whose two anchors are the same bin.
    diagonal <- (ids$first == ids$second)
    ids <- lapply(ids, as.data.table)
    # One assay column per (condition, replicate) pair.
    columnNames <- paste(object$condition, object$replicate)
    matrixAssay <- SummarizedExperiment::assay(object)
    colnames(matrixAssay) <- columnNames
    # Values on diagonal
    onDiagonal <- data.table::data.table(
        ids$first[diagonal,.(chromosome = seqnames, index)],
        matrixAssay[diagonal, , drop=FALSE]
    )
    onDiagonal <- data.table::melt.data.table(
        onDiagonal,
        id.vars = c("chromosome", "index"),
        value.name = "ratio",
        na.rm = TRUE
    )
    # Off-diagonal values: each interaction is counted for both of its
    # anchors, then summed by bin below.
    offDiagonal <- data.table::data.table(
        "chromosome" = c(ids$first$seqnames[!diagonal],
                         ids$second$seqnames[!diagonal]),
        "index" = c(ids$first$index[!diagonal],
                    ids$second$index[!diagonal]),
        matrixAssay[!diagonal, , drop=FALSE]
    )
    offDiagonal <- data.table::melt.data.table(
        offDiagonal,
        id.vars = c("chromosome", "index"),
        value.name = "offDiagonal",
        variable.name = "variable",
        na.rm = TRUE
    )
    offDiagonal <- offDiagonal[!is.na(offDiagonal)]
    # Sum of off-diagonal values per bin and replicate.
    offDiagonal <- offDiagonal[
        ,
        .(offDiagonal = sum(offDiagonal, na.rm=TRUE)),
        by = c("chromosome", "index", "variable")
    ]
    # Join the diagonal value with the per-bin off-diagonal sum.
    onDiagonal <- data.table::merge.data.table(
        onDiagonal,
        offDiagonal,
        all = TRUE,
        by = c("chromosome", "index", "variable"),
        sort = FALSE
    )
    # Shouldn't happen after normalizations
    onDiagonal[is.na(ratio) & !is.na(offDiagonal),ratio := 0]
    # Split the "condition replicate" column name back into two columns.
    onDiagonal[, c("condition", "replicate") := data.table::tstrsplit(
        variable,
        " ",
        fixed = TRUE
    )]
    onDiagonal <- onDiagonal[, .(
        chromosome,
        index,
        condition,
        replicate,
        ratio,
        offDiagonal
    )]
    return(onDiagonal)
}
#' @description
#' Uses ratio between self interactions and other interactions to determine
#' which clusters correspond to compartments A and B.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#' @param parallel
#' Whether or not to parallelize the processing. Defaults to FALSE.
#' Currently unused in this function.
#'
#' @return
#' A \code{\link{HiCDOCDataSet}}, with selfInteractionRatios, and with A and B
#' labels replacing cluster numbers in compartments, concordances, distances,
#' and centroids.
#'
#' @keywords internal
#' @noRd
.predictCompartmentsAB <- function(object, parallel = FALSE) {
    ratios <- .computeSelfInteractionRatios(object)
    object@selfInteractionRatios <- ratios
    # Attach each position's diagonal value ("ratio") to its cluster.
    compartments <- data.table::merge.data.table(
        object@compartments,
        object@selfInteractionRatios,
        by = c("chromosome", "index", "condition"),
        all.x = TRUE,
        sort = FALSE
    )
    compartments[, offDiagonal := NULL]
    compartments[, ratio := as.numeric(ratio)]
    # Median ratio per chromosome and cluster.
    compartments <- compartments[
        ,
        .(ratio = stats::median(ratio, na.rm = TRUE)),
        by = .(chromosome, cluster)
    ]
    compartments <- data.table::dcast(
        compartments,
        chromosome ~ cluster,
        value.var = "ratio",
        fill = 0
    )
    # A is the cluster with the greater median ratio.
    # NOTE(review): the detectCompartments man page describes the cluster
    # with the *smallest* median ratio as compartment A — confirm which is
    # intended.
    compartments[, A := data.table::fifelse(`1` >= `2`, 1, 2)]
    compartments <- compartments[, .(chromosome, A)]
    # Translate cluster numbers into A/B factors in every result slot.
    object@compartments <- data.table::merge.data.table(
        object@compartments,
        compartments,
        by = "chromosome",
        all.x = TRUE
    )
    object@compartments[, compartment := factor(
        data.table::fifelse(cluster == A, "A", "B"),
        levels=c("A", "B"))]
    object@compartments[, A := NULL]
    object@concordances <- data.table::merge.data.table(
        object@concordances,
        compartments,
        by = "chromosome",
        all.x = TRUE
    )
    # Flip concordance signs when cluster 2 was relabelled as A, so that
    # positive concordance always points towards compartment A.
    object@concordances[, change := data.table::fifelse(A == 1, 1,-1)]
    object@concordances[, concordance := change * concordance]
    object@concordances[, compartment := factor(
        data.table::fifelse(cluster == A, "A", "B"),
        levels = c("A", "B")
    )]
    object@concordances[, change := NULL]
    object@concordances[, A := NULL]
    object@distances <- data.table::merge.data.table(
        object@distances,
        compartments,
        by = "chromosome",
        all.x = TRUE
    )
    object@distances[, compartment := factor(
        data.table::fifelse(cluster == A, "A", "B"),
        levels = c("A", "B")
    )]
    object@distances[, A := NULL]
    object@centroids <- data.table::merge.data.table(
        object@centroids,
        compartments,
        by = "chromosome",
        all.x = TRUE
    )
    object@centroids[, compartment := factor(
        data.table::fifelse(cluster == A, "A", "B"),
        levels = c("A", "B")
    )]
    object@centroids[, A := NULL]
    return(object)
}
#' @description
#' Computes p-values for genomic positions whose assigned compartment switches
#' between two conditions.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#'
#' @return
#' A \code{\link{HiCDOCDataSet}}, with differences and their p-values.
#'
#' @keywords internal
#' @noRd
.computePValues <- function(object) {
    # Compute median of differences between pairs of concordances
    # N.b. median of differences != difference of medians
    totalReplicates <- length(object$replicate)
    concordances <- object@concordances
    concordances[, condition := factor(
        condition,
        levels = sort(unique(object$condition))
    )]
    data.table::setorder(concordances, chromosome, index, condition, replicate)
    # Cross join: each concordance row paired with every replicate of the
    # same position (concordances1 repeats each row, concordances2 tiles).
    concordances1 <- concordances[
        rep(seq_len(nrow(concordances)), each = totalReplicates),
        .(
            chromosome,
            index,
            condition.1 = condition,
            concordance.1 = concordance
        )
    ]
    concordances2 <- concordances[
        rep(seq_len(nrow(concordances)), totalReplicates),
        .(
            chromosome = chromosome,
            index = index,
            condition.2 = condition,
            concordance.2 = concordance
        )
    ]
    data.table::setorder(concordances2, chromosome, index)
    concordances2[, `:=`(chromosome = NULL, index = NULL)]
    concordanceDifferences <- base::cbind(concordances1, concordances2)
    rm(concordances1, concordances2)
    # Keep each unordered condition pair once.
    concordanceDifferences <- concordanceDifferences[
        as.numeric(condition.1) < as.numeric(condition.2)
    ]
    # Median absolute concordance difference per position and condition pair.
    concordanceDifferences <- concordanceDifferences[,
        .(difference = stats::median(abs(concordance.1 - concordance.2))),
        by = .(chromosome, index, condition.1, condition.2)
    ]
    # Format compartments per pair of conditions
    # Join medians of differences and pairs of conditions
    totalConditions <- length(unique(object$condition))
    compartments <- object@compartments
    compartments[, condition := factor(
        condition,
        levels = sort(unique(object$condition))
    )]
    data.table::setorder(compartments, chromosome, index, condition)
    # Same cross-join construction as above, at the condition level.
    compartments1 <- compartments[
        rep(seq_len(nrow(compartments)), each = totalConditions),
        .(
            chromosome,
            index,
            condition.1 = condition,
            compartment.1 = compartment
        )
    ]
    compartments2 <- compartments[
        rep(seq_len(nrow(compartments)), totalConditions),
        .(
            chromosome,
            index,
            condition.2 = condition,
            compartment.2 = compartment
        )
    ]
    data.table::setorder(compartments2, chromosome, index)
    compartments2[, `:=`(chromosome = NULL, index = NULL)]
    comparisons <- base::cbind(compartments1, compartments2)
    rm(compartments1, compartments2)
    comparisons <- comparisons[
        as.numeric(condition.1) < as.numeric(condition.2)
    ]
    # No condition pair at all (single condition): return empty, correctly
    # typed comparison and difference tables.
    if(nrow(comparisons) == 0){
        object@comparisons <- comparisons[,.(chromosome,
                                             index,
                                             condition.1,
                                             condition.2,
                                             compartment.1,
                                             compartment.2,
                                             difference = index)]
        object@differences <- comparisons[,.(chromosome,
                                             index,
                                             condition.1,
                                             condition.2,
                                             pvalue = index,
                                             pvalue.adjusted = index,
                                             direction = compartment.1)]
        return(object)
    }
    comparisons <- data.table::merge.data.table(
        comparisons,
        concordanceDifferences,
        by = c("chromosome", "index", "condition.1", "condition.2")
    )
    data.table::setcolorder(
        comparisons,
        c(
            "chromosome",
            "index",
            "condition.1",
            "condition.2",
            "compartment.1",
            "compartment.2",
            "difference"
        )
    )
    object@comparisons <- comparisons
    # Compute p-values for switching positions
    # P-values for a condition pair computed from the whole genome distribution
    differences <- copy(comparisons)
    # H0 distribution: differences of positions that do NOT switch.
    differences[compartment.1 == compartment.2 , H0_value := difference]
    data.table::setorder(differences, condition.1, condition.2)
    quantiles <- split(
        differences,
        by = c("condition.1", "condition.2")
    )
    quantiles <- lapply(
        quantiles,
        function(x) x[difference > 0]
    )
    # Rank each difference against the empirical H0 distribution of its
    # condition pair.
    quantiles <- lapply(
        quantiles,
        function(x) {
            if (nrow(x) > 0) return(stats::ecdf(x$H0_value)(x$difference))
            return(NULL)
        }
    )
    quantiles <- do.call("c", quantiles)
    differences[difference > 0, quantile := quantiles]
    # Pvalues
    differences <- differences[compartment.1 != compartment.2]
    differences[, pvalue := 1 - quantile]
    differences[pvalue < 0, pvalue := 0]
    differences[pvalue > 1, pvalue := 1]
    # Benjamini-Hochberg adjustment, per condition pair.
    pvalueAdjusted <- split(
        differences,
        by = c("condition.1", "condition.2")
    )
    pvalueAdjusted <- lapply(
        pvalueAdjusted,
        function(x) {
            if (nrow(x) > 0) return(stats::p.adjust(x$pvalue, method = "BH"))
            return(NULL)
        }
    )
    pvalueAdjusted <- do.call("c", pvalueAdjusted)
    differences[, pvalue.adjusted := pvalueAdjusted]
    # Changes
    differences[, direction := data.table::fifelse(
        compartment.1 == "A", "A->B", "B->A"
    )]
    differences[, direction := factor(direction, levels = c("A->B", "B->A"))]
    differences <- differences[, .(
        chromosome,
        index,
        condition.1,
        condition.2,
        pvalue,
        pvalue.adjusted,
        direction
    )]
    data.table::setorder(
        differences,
        chromosome,
        index,
        condition.1,
        condition.2
    )
    object@differences <- differences
    return(object)
}
#' @title
#' A and B compartments detection and differences across conditions.
#'
#' @description
#' Detects compartments for each genomic position in each condition, and
#' computes p-values for compartment differences between conditions.
#'
#' @details
#' \subsection{Genomic positions clustering}{
#' To clusterize genomic positions, the algorithm follows these steps:
#' \enumerate{
#' \item{
#' For each chromosome and condition, get the interaction vectors of
#' each genomic position. Each genomic position can have multiple
#' interaction vectors, corresponding to the multiple replicates in
#' that condition.
#' }
#' \item{
#' For each chromosome and condition, use constrained K-means to
#' clusterize the interaction vectors, forcing replicate interaction
#' vectors into the same cluster. The euclidean distance between
#' interaction vectors determines their similarity.
#' }
#' \item{
#' For each interaction vector, compute its concordance, which is
#' the confidence in its assigned cluster. Mathematically, it is the
#' log ratio of its distance to each centroid, normalized by the
#' distance between both centroids, and min-maxed to a [-1,1]
#' interval.
#' }
#' \item{
#' For each chromosome, compute the distance between all centroids
#' and the centroids of the first condition. The cross-condition
#' clusters whose centroids are closest are given the same cluster
#' label. This results in two clusters per chromosome, spanning all
#' conditions.
#' }
#' }
#' }
#' \subsection{A/B compartments prediction}{
#' To match each cluster with an A or B compartment, the algorithm follows these
#' steps:
#' \enumerate{
#' \item{
#' For each genomic position, compute its self interaction ratio,
#' which is the difference between its self interaction and the
#' median of its other interactions.
#' }
#' \item{
#' For each chromosome, for each cluster, get the median self
#' interaction ratio of the genomic positions in that cluster.
#' }
#' \item{
#' For each chromosome, the cluster with the smallest median self
#' interaction ratio is matched with compartment A, and the cluster
#' with the greatest median self interaction ratio is matched with
#' compartment B. Compartment A being open, there are more overall
#' interactions between distant genomic positions, so it is assumed
#' that the difference between self interactions and other
#' interactions is lower than in compartment B.
#' }
#' }
#' }
#' \subsection{Significant differences detection}{
#' To find significant compartment differences across conditions, and compute
#' their p-values, the algorithm follows three steps:
#' \enumerate{
#' \item{
#' For each pair of replicates in different conditions, for each
#' genomic position, compute the absolute difference between its
#' concordances.
#' }
#' \item{
#' For each pair of conditions, for each genomic position, compute
#' the median of its concordance differences.
#' }
#' \item{
#' For each pair of conditions, for each genomic position whose
#' assigned compartment switches, rank its median against the
#' empirical cumulative distribution of medians of all non-switching
#' positions in that condition pair. Adjust the resulting p-value
#' with the Benjamini–Hochberg procedure.
#' }
#' }
#' }
#' \subsection{Parallel processing}{
#' The parallel version of detectCompartments uses the
#' \code{\link[BiocParallel]{bpmapply}} function. Before calling the
#' function in parallel, you should specify the parallel parameters such as:
#' \itemize{
#' \item{On Linux:
#'
#' \code{multiParam <- BiocParallel::MulticoreParam(workers = 10)}
#' }
#' \item{On Windows:
#'
#' \code{multiParam <- BiocParallel::SnowParam(workers = 10)}
#' }
#' }
#' And then you can register the parameters to be used by BiocParallel:
#'
#' \code{BiocParallel::register(multiParam, default = TRUE)}
#'
#' You should be aware that using MulticoreParam, reproducibility of the
#' detectCompartments function using a RNGseed may not work. See this
#' \href{https://github.com/Bioconductor/BiocParallel/issues/122}{issue}
#' for more details.
#' }
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#'
#' @param parallel
#' Whether or not to parallelize the processing. Defaults to FALSE
#' See 'Details'.
#'
#' @param kMeansDelta
#' The convergence stop criterion for the clustering. When the centroids'
#' distances between two iterations is lower than this value, the clustering
#' stops. Defaults to \code{object$kMeansDelta} which is originally set to
#' \code{defaultHiCDOCParameters$kMeansDelta} = 0.0001.
#'
#' @param kMeansIterations
#' The maximum number of iterations during clustering. Defaults to
#' \code{object$kMeansIterations} which is originally set to
#' \code{defaultHiCDOCParameters$kMeansIterations} = 50.
#'
#' @param kMeansRestarts
#' The amount of times the clustering is restarted. For each restart, the
#' clustering iterates until convergence or reaching the maximum number of
#' iterations. The clustering that minimizes inner-cluster variance is selected.
#' Defaults to \code{object$kMeansRestarts} which is originally set to
#' \code{defaultHiCDOCParameters$kMeansRestarts} = 20.
#'
#' @param PC1CheckThreshold
#' The minimum percentage of variance that should be explained by
#' the first principal component of centroids to pass sanity check.
#' Defaults to \code{object$PC1CheckThreshold} which is originally set to
#' \code{defaultHiCDOCParameters$PC1CheckThreshold} = 0.75
#'
#' @return
#' A \code{\link{HiCDOCDataSet}}, with compartments, concordances, distances,
#' centroids, and differences.
#'
#' @examples
#' data(exampleHiCDOCDataSet)
#' ## Run all filtering and normalization steps (not run for timing reasons)
#' # object <- filterSmallChromosomes(exampleHiCDOCDataSet)
#' # object <- filterSparseReplicates(object)
#' # object <- filterWeakPositions(object)
#' # object <- normalizeTechnicalBiases(object)
#' # object <- normalizeBiologicalBiases(object)
#' # object <- normalizeDistanceEffect(object)
#'
#' # Detect compartments and differences across conditions
#' object <- detectCompartments(exampleHiCDOCDataSet)
#'
#' @usage
#' detectCompartments(
#' object,
#' parallel = FALSE,
#' kMeansDelta = NULL,
#' kMeansIterations = NULL,
#' kMeansRestarts = NULL,
#' PC1CheckThreshold = NULL
#' )
#'
#' @export
detectCompartments <- function(
    object,
    parallel = FALSE,
    kMeansDelta = NULL,
    kMeansIterations = NULL,
    kMeansRestarts = NULL,
    PC1CheckThreshold = NULL
) {
    .validateSlots(
        object,
        slots = c(
            "chromosomes",
            "validAssay",
            "parameters"
        )
    )
    # Explicitly passed parameters override the ones stored in the object.
    if (!is.null(kMeansDelta)) {
        object@parameters$kMeansDelta <- kMeansDelta
    }
    if (!is.null(kMeansIterations)) {
        object@parameters$kMeansIterations <- kMeansIterations
    }
    if (!is.null(kMeansRestarts)) {
        object@parameters$kMeansRestarts <- kMeansRestarts
    }
    if (!is.null(PC1CheckThreshold)) {
        object@parameters$PC1CheckThreshold <- PC1CheckThreshold
    }
    object@parameters <- .validateParameters(object@parameters)
    # Pipeline: cluster, harmonize labels, assign A/B, sanity check,
    # then compute switch p-values.
    message("Clustering genomic positions.")
    object <- .clusterize(object, parallel)
    object <- .tieCentroids(object)
    message("Predicting A/B compartments.")
    object <- .predictCompartmentsAB(object, parallel)
    object <- .checkResults(object)
    message("Detecting significant differences.")
    object <- .computePValues(object)
    # Reformatting outputs
    object <- .formatDetectCompartment(object)
    return(object)
}
<file_sep>#' @title
#' Plot A/B compartments.
#'
#' @description
#' Plots the predicted compartments in each experiment condition.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#' @param chromosome
#' A chromosome name or index in \code{chromosomes(object)}.
#' @param xlim
#' A vector of the minimum and maximum positions to display. If NULL, displays
#' all positions. Defaults to NULL.
#' @param colour
#' Border color for the compartments. Default to `gray90`. `NA` means no border.
#'
#' @return
#' A \code{ggplot}.
#'
#' @examples
#' data(exampleHiCDOCDataSetProcessed)
#' plotCompartments(exampleHiCDOCDataSetProcessed, chromosome = 1)
#'
#' @export
plotCompartments <- function(
    object,
    chromosome,
    xlim = NULL,
    colour = "gray90"
) {
    .validateSlots(object, slots = c("compartments"))
    chromosomeName <- .validateNames(object, chromosome, "chromosomes")
    xlim <- .validateXlim(xlim, object, chromosomeName)
    # Keep only the compartments of the requested chromosome.
    compartments <- object@compartments[
        GenomeInfoDb::seqnames(object@compartments) == chromosomeName
    ]
    if (length(compartments) == 0) {
        message("No compartments for chromosome ", chromosomeName, ".")
        return(NULL)
    }
    compartments <- as.data.table(compartments)
    # Bin size: the most frequent compartment width.
    binSize <- .modeVector(compartments$width)
    # Histogram bars are centered on each bin.
    compartments[, position := start + 0.5 * binSize]
    # for the last bin if not of the same size
    if(identical(xlim, c(min(compartments[,start]), max(compartments[,end])))){
        xlim <- c(min(xlim[1], compartments[,position]),
                  max(xlim[2], compartments[,start + binSize]))
    }
    # One stacked histogram row per condition, compartments colored by A/B.
    plot <- ggplot(
        data = compartments,
        aes(x = position, fill = compartment)
    ) + geom_histogram(
        binwidth = binSize,
        colour = colour,
        linewidth = 0.05
    ) + coord_cartesian(
        xlim=c(xlim[1] - 0.5 * binSize, xlim[2] + 0.5 * binSize)
    ) + facet_grid(
        rows = vars(condition),
        margins = FALSE,
        switch = "y"
    ) + labs(title = paste0("Compartments of chromosome ",
                            chromosomeName, " by condition")) +
        theme_minimal() + theme(
        axis.title = element_blank(),
        axis.text = element_blank(),
        axis.line = element_blank(),
        axis.ticks = element_blank(),
        panel.grid = element_blank(),
        panel.spacing = unit(2, "pt"),
        legend.position = "bottom",
        legend.title = element_text(size = 8),
        legend.text = element_text(size = 8),
        strip.placement = "outside"
    )
    return(plot)
}
<file_sep>#include <algorithm> // std::find, std::all_of
#include <vector> // std::vector
#include <Rcpp.h>
using namespace Rcpp;
// Owning copy of a matrix-like object (e.g. an Rcpp matrix) as nested
// std::vectors, so the clustering code can work on plain C++ containers.
template <class T>
class StdMatrix {
    std::vector<std::vector<T>> matrix;
    public:
        // Copies any matrix exposing nrow(), ncol() and row(i) with
        // iterable rows (e.g. Rcpp::NumericMatrix / IntegerMatrix).
        template <class U>
        StdMatrix (U &m): matrix(m.nrow(), std::vector<T>(m.ncol())) {
            for (int i = 0; i < m.nrow(); ++i) {
                std::vector<T> row (m.row(i).begin(), m.row(i).end());
                matrix[i] = row;
            }
        }
        // Number of rows.
        size_t nrow () const {
            return matrix.size();
        }
        // Number of columns; assumes at least one row.
        size_t ncol () const {
            return matrix.front().size();
        }
        // Mutable reference to row i (no bounds check).
        std::vector<T> &row (size_t i) {
            return matrix[i];
        }
};
// Euclidean (L2) distance between two equally-sized vectors.
double getDistance(
    const std::vector<double> &vector1,
    const std::vector<double> &vector2
) {
    double squaredSum = 0.0;
    auto second = vector2.begin();
    for (const double first : vector1) {
        const double difference = first - *second;
        squaredSum += difference * difference;
        ++second;
    }
    return std::sqrt(squaredSum);
}
// Sum of the distances between each previous centroid and its updated
// counterpart; used as the convergence measure of the clustering loop.
double getCentroidsDelta(
    const std::vector<std::vector<double>> &previousCentroids,
    const std::vector<std::vector<double>> &centroids
) {
    double delta = 0.0;
    auto current = centroids.begin();
    for (const std::vector<double> &previous : previousCentroids) {
        delta += getDistance(previous, *current);
        ++current;
    }
    return delta;
}
// Median of the given values; the vector is partially reordered in place.
double getMedianValue(
    std::vector<double> &values
) {
    const size_t upperMiddle = values.size() / 2;
    std::nth_element(values.begin(), values.begin() + upperMiddle, values.end());
    double median = values[upperMiddle];
    if (values.size() % 2 == 0) {
        // Even count: average the upper middle with the largest value of
        // the lower half.
        const double lowerMiddle =
            *std::max_element(values.begin(), values.begin() + upperMiddle);
        median = (lowerMiddle + median) / 2.0;
    }
    return median;
}
void getMedianVector(
const std::vector<std::vector<double>> &vectors,
std::vector<double> &medianVector
) {
std::vector<double> buffer(vectors.size());
for (unsigned int rowId = 0; rowId < vectors[0].size(); rowId++) {
for (unsigned int columnId = 0; columnId < vectors.size(); columnId++) {
buffer[columnId] = vectors[columnId][rowId];
}
medianVector[rowId] = getMedianValue(buffer);
}
}
// Recomputes each centroid as the mean of the matrix rows currently
// assigned to its cluster.
void updateCentroids(
    std::vector<std::vector<double>> &centroids,
    std::vector<int> &clusters,
    StdMatrix<double> &matrix
) {
    std::vector<unsigned int> totalClusterMembers(centroids.size(), 0);
    // Reset all centroids to zero before accumulating.
    for (std::vector<double> &centroid: centroids) {
        centroid = std::vector<double>(centroid.size(), 0.0);
    }
    // Sum the rows of each cluster and count its members.
    for (unsigned int vectorId = 0; vectorId < matrix.nrow(); vectorId++) {
        for (size_t i = 0; i < matrix.ncol(); ++i) {
            centroids[clusters[vectorId]][i] += matrix.row(vectorId)[i];
        }
        totalClusterMembers[clusters[vectorId]]++;
    }
    // Divide by the member count. N.b. an empty cluster keeps the
    // all-zero centroid set above, not its previous value.
    for (
        unsigned int centroidId = 0;
        centroidId < centroids.size();
        centroidId++
    ) {
        if (totalClusterMembers[centroidId] > 0) {
            for (size_t i = 0; i < centroids[centroidId].size(); ++i) {
                centroids[centroidId][i] =
                    centroids[centroidId][i] / totalClusterMembers[centroidId];
            }
        }
    }
}
// Result of a nearest-centroid search: the index of the closest centroid
// and the distance to it.
struct NearestCentroid {
    int centroidId;
    // double, not float: distances come from getDistance (double) and the
    // search seeds this field with std::numeric_limits<double>::max(),
    // which does not fit in a float. (Resolves the previous
    // "Should be double here!" note.)
    double distance;
};
// Finds the centroid closest (Euclidean distance) to the given vector.
NearestCentroid getNearestCentroid(
    std::vector<double> &vector,
    std::vector<std::vector<double>> &centroids
) {
    NearestCentroid nearestCentroid;
    nearestCentroid.centroidId = 0;
    // Seed with the largest representable distance so any real centroid wins.
    nearestCentroid.distance = std::numeric_limits<double>::max();
    double distance;
    for (
        unsigned int centroidId = 0;
        centroidId < centroids.size();
        centroidId++
    ) {
        // A first component of -1.0 apparently marks a disabled/uninitialized
        // centroid (the commented-out line checked emptiness instead) —
        // NOTE(review): confirm this sentinel convention.
        if (centroids[centroidId].front() == -1.0) continue;
        //if (centroids[centroidId].size() == 0) continue;
        distance = getDistance(vector, centroids[centroidId]);
        if (distance < nearestCentroid.distance) {
            nearestCentroid.distance = distance;
            nearestCentroid.centroidId = centroidId;
        }
    }
    return nearestCentroid;
}
// Initialize centroids with K-means++-style seeding: the first centroid is
// a uniformly random row; each next centroid is a row sampled with
// probability proportional to its distance to the nearest chosen centroid.
// Uses R's RNG (unif_rand) so results follow the R session seed.
void initializeCentroids(
    std::vector<std::vector<double>> &centroids,
    StdMatrix<double> &matrix
) {
    std::vector<double> distances(matrix.nrow());
    centroids[0] = matrix.row(unif_rand() * matrix.nrow());
    double sum;
    for (
        unsigned int centroidId = 1;
        centroidId < centroids.size();
        centroidId++
    ) {
        sum = 0;
        // Distance of every row to its nearest already-chosen centroid.
        for (unsigned int vectorId = 0; vectorId < matrix.nrow(); vectorId++) {
            distances[vectorId] = getNearestCentroid(
                matrix.row(vectorId),
                centroids
            ).distance;
            sum += distances[vectorId];
        }
        // Sample a row with probability proportional to its distance.
        // NOTE(review): canonical K-means++ weights by *squared* distance;
        // this uses the raw distance — confirm intended.
        sum *= unif_rand();
        for (unsigned int vectorId = 0; vectorId < matrix.nrow(); vectorId++) {
            if ((sum -= distances[vectorId]) > 0) continue;
            centroids[centroidId] = matrix.row(vectorId);
            break;
        }
    }
}
// Assign every vector to a cluster under the constraint that all vectors
// of a link share the same cluster: the median vector of each link is
// computed and the whole link is assigned to its nearest centroid.
//
// Throws std::invalid_argument when a link references a row outside the
// matrix.
//
// Fix: the reference parameter was garbled ("¢roids") by a broken
// character encoding; restored to "&centroids".
void assignClusters(
    std::vector<int> &clusters,
    std::vector<std::vector<double>> &centroids,
    StdMatrix<double> &matrix,
    StdMatrix<int> &links
) {
    std::vector<double> medianVector(matrix.ncol());
    int centroidId;
    for (unsigned int linkId = 0; linkId < links.nrow(); linkId++) {
        std::vector<int> &link = links.row(linkId);
        std::vector<std::vector<double>> linkedVectors(
            link.size(),
            std::vector<double>(matrix.ncol(), 0.0)
        );
        for (size_t i = 0; i < link.size(); ++i) {
            unsigned int vectorId = link[i];
            // Guard against out-of-range row indices in the links matrix.
            if (vectorId >= matrix.nrow()) {
                throw std::invalid_argument(
                    "Link (" +
                    std::to_string(vectorId) +
                    ") out of range (" +
                    std::to_string(matrix.nrow()) +
                    ").\n"
                );
            }
            linkedVectors[i] = matrix.row(vectorId);
        }
        getMedianVector(linkedVectors, medianVector);
        centroidId = getNearestCentroid(
            medianVector,
            centroids
        ).centroidId;
        // Every member of the link gets the same cluster.
        for (int vectorId: link) {
            clusters[vectorId] = centroidId;
        }
    }
}
double clusterize(
StdMatrix<double> &matrix,
StdMatrix<int> &links,
std::vector<int> &clusters,
std::vector<std::vector<double>> ¢roids,
double maxDelta,
int maxIterations
) {
int totalIterations = 0;
double centroidsDelta;
std::vector<std::vector<double>> previousCentroids;
initializeCentroids(centroids, matrix);
do {
previousCentroids = centroids;
assignClusters(clusters, centroids, matrix, links);
updateCentroids(centroids, clusters, matrix);
totalIterations++;
centroidsDelta = getCentroidsDelta(previousCentroids, centroids);
} while (
(centroidsDelta > maxDelta) && (totalIterations < maxIterations)
);
double quality = 0.0;
for (unsigned int vectorId = 0; vectorId < matrix.nrow(); vectorId++) {
quality += getDistance(matrix.row(vectorId), centroids[clusters[vectorId]]);
}
return quality;
}
// Entry point exported to R: runs constrained K-means with random
// restarts and keeps the clustering with the best (lowest) quality.
//
// rMatrix:       data vectors, one per row
// rLinks:        groups of row indices forced into the same cluster
// maxDelta:      convergence threshold on centroid movement
// maxIterations: cap on assignment/update iterations per restart
// totalRestarts: number of independent K-means runs
// totalClusters: number of clusters to build
//
// Returns a List with "clusters" (best assignment) and "centroids".
// Throws std::invalid_argument on invalid input or an empty cluster.
// [[Rcpp::export]]
List constrainedClustering(
    NumericMatrix &rMatrix,
    IntegerMatrix &rLinks,
    double maxDelta = 0.0001,
    int maxIterations = 50,
    int totalRestarts = 20,
    int totalClusters = 2
) {
    if (any(is_na(rMatrix))) {
        throw std::invalid_argument("Matrix should not contain NAs.");
    }
    if (any(is_na(rLinks))) {
        throw std::invalid_argument("Links should not contain NAs.");
    }
    if (any(is_nan(rMatrix))) {
        throw std::invalid_argument("Matrix should not contain NANs.");
    }
    if (any(is_nan(rLinks))) {
        throw std::invalid_argument("Links should not contain NANs.");
    }
    if (rMatrix.nrow() == 0) {
        throw std::invalid_argument("Matrix should not be empty.");
    }
    StdMatrix<double> matrix(rMatrix);
    StdMatrix<int> links(rLinks);
    std::vector<int> clusters(matrix.nrow());
    std::vector<int> bestClusters(matrix.nrow());
    // Centroids start at the -1.0 sentinel meaning "not initialized yet"
    // (see getNearestCentroid).
    std::vector<std::vector<double>> centroids(
        totalClusters, std::vector<double>(matrix.ncol(), -1.0)
    );
    std::vector<std::vector<double>> bestCentroids(
        totalClusters, std::vector<double>(matrix.ncol(), -1.0)
    );
    double quality, minQuality = std::numeric_limits<double>::max();
    for (int restart = 0; restart < totalRestarts; restart++) {
        quality = clusterize(
            matrix, links, clusters, centroids, maxDelta, maxIterations
        );
        if (quality < minQuality) {
            minQuality = quality;
            bestClusters = clusters;
            bestCentroids = centroids;
        }
    }
    // Every requested cluster must have received at least one element.
    // (Fix: the check was hard-coded to clusters 0 and 1, so an empty
    // cluster went undetected whenever totalClusters > 2.)
    for (int clusterId = 0; clusterId < totalClusters; clusterId++) {
        if (
            std::find(bestClusters.begin(), bestClusters.end(), clusterId)
            == bestClusters.end()
        ) {
            throw std::invalid_argument(
                "Failed clustering: one of the clusters is empty.\n"
            );
        }
    }
    List output;
    output["clusters"] = wrap(bestClusters);
    output["centroids"] = wrap(bestCentroids);
    return output;
}
<file_sep>#' @description
#' Fill \code{\link{InteractionSet}} with possibly missing values
#'
#' @param interactionSet
#' An \code{\link{InteractionSet}}.
#' @param interactionSetUnion
#' The full \code{\link{InteractionSet}}.
#' @param fill
#' Fill missing values with this.
#'
#' @return
#' The full \code{\link{InteractionSet}}.
#'
#' @keywords internal
#' @noRd
.fillInteractionSet <- function(
    interactionSet,
    interactionSetUnion,
    fill = NA
) {
    # Positions of the existing interactions within the full set.
    positions <- GenomicRanges::match(interactionSet, interactionSetUnion)
    nbColumns <- ncol(interactionSet)
    # Start from a matrix entirely made of the fill value, then overwrite
    # the rows corresponding to the interactions we do have.
    fullAssays <- matrix(
        fill,
        nrow = length(interactionSetUnion),
        ncol = nbColumns
    )
    fullAssays[positions, ] <- SummarizedExperiment::assay(interactionSet)
    InteractionSet::InteractionSet(
        fullAssays,
        interactionSetUnion,
        colData = SummarizedExperiment::colData(interactionSet)
    )
}
#' @description
#' Merge two different \code{\link{InteractionSet}}.
#'
#' @param interactionSet1
#' The first \code{\link{InteractionSet}}.
#' @param interactionSet2
#' The second \code{\link{InteractionSet}}.
#' @param fill
#' Fill missing values with this.
#'
#' @return
#' The merged \code{\link{InteractionSet}}.
#'
#' @keywords internal
#' @noRd
.mergeInteractionSet <- function(interactionSet1, interactionSet2, fill = NA) {
    # Union of the interactions present in either set.
    allInteractions <- GenomicRanges::union(
        InteractionSet::interactions(interactionSet1),
        InteractionSet::interactions(interactionSet2)
    )
    # Expand both sets onto the union, padding missing rows with 'fill',
    # then bind their sample columns together.
    filled1 <- .fillInteractionSet(interactionSet1, allInteractions, fill)
    filled2 <- .fillInteractionSet(interactionSet2, allInteractions, fill)
    BiocGenerics::cbind(filled1, filled2)
}
#' @description
#' Format the outputs produced by \code{detectCompartements}.
#' @param object
#' a HiCDOCDataSet object
#' @return
#' a HiCDOCDataSet object
#'
#' @keywords internal
#' @noRd
.formatDetectCompartment <- function(object) {
    # Reference levels for the factors built below.
    chromosomeNames <- object@chromosomes
    conditionNames <- sort(unique(object$condition))
    replicateNames <- sort(unique(object$replicate))
    all.regions <- InteractionSet::regions(object)
    # Concordances
    # Turn chromosome/replicate into factors with consistent levels, then
    # replace the data.table by the matching regions (GRanges) with the
    # concordance values attached as metadata columns.
    object@concordances[, `:=`(
        chromosome = factor(chromosome, levels = chromosomeNames),
        replicate = factor(replicate, levels = replicateNames)
    )]
    concordances <- object@concordances
    object@concordances <- all.regions[
        match(concordances$index, S4Vectors::mcols(all.regions)$index)
    ]
    S4Vectors::mcols(object@concordances) <- S4Vectors::DataFrame(
        concordances[, .(
            index,
            condition,
            replicate,
            compartment,
            concordance
        )]
    )
    # Centroids
    object@centroids[, `:=`(
        chromosome = factor(chromosome, levels = chromosomeNames),
        condition = factor(condition, levels = conditionNames)
    )]
    # Differences
    # Attach the per-chromosome quality checks, then encode adjusted
    # p-values as conventional significance stars (* to ****).
    object@differences <- data.table::merge.data.table(object@differences,
        object@checks, by = "chromosome")
    object@differences[, chromosome := factor(
        chromosome,
        levels = chromosomeNames
    )]
    object@differences[, significance := ""]
    object@differences[pvalue.adjusted <= 0.05, significance := "*"]
    object@differences[pvalue.adjusted <= 0.01, significance := "**"]
    object@differences[pvalue.adjusted <= 0.001, significance := "***"]
    object@differences[pvalue.adjusted <= 0.0001, significance := "****"]
    differences <- object@differences
    object@differences <- all.regions[
        match(differences$index, S4Vectors::mcols(all.regions)$index)
    ]
    S4Vectors::mcols(object@differences) <- S4Vectors::DataFrame(
        differences[, .(
            index,
            condition.1,
            condition.2,
            pvalue,
            pvalue.adjusted,
            direction,
            significance,
            centroid.check,
            PC1.check,
            assignment.check
        )]
    )
    # Compartments
    # Same treatment: factor levels, merge with checks, convert to GRanges
    # with metadata columns.
    object@compartments[, chromosome := factor(
        chromosome,
        levels = chromosomeNames
    )]
    object@compartments <- data.table::merge.data.table(object@compartments,
        object@checks, by = "chromosome")
    compartments <- object@compartments
    object@compartments <- all.regions[
        match(compartments$index, S4Vectors::mcols(all.regions)$index)
    ]
    S4Vectors::mcols(object@compartments) <- S4Vectors::DataFrame(
        compartments[, .(
            index,
            condition,
            compartment,
            centroid.check,
            PC1.check,
            assignment.check
        )]
    )
    # Distances
    object@distances[, `:=`(
        chromosome = factor(chromosome, levels = chromosomeNames),
        condition = factor(condition, levels = conditionNames),
        replicate = factor(replicate, levels = replicateNames)
    )]
    # Comparisons
    object@comparisons[, chromosome := factor(
        chromosome,
        levels = chromosomeNames
    )]
    # selfInteractionRatios
    object@selfInteractionRatios[, `:=`(
        chromosome = factor(chromosome, levels = chromosomeNames),
        condition = factor(condition, levels = conditionNames),
        replicate = factor(replicate, levels = replicateNames)
    )]
    return(object)
}
<file_sep>#' @description
#' Returns the provided limit if valid, or the minimum and maximum of the
#' chromosome's positions.
#'
#' @param xlim
#' A numeric vector of a minimum and a maximum limit for the x axis.
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#' @param chromosomeName
#' The name of a chromosome.
#'
#' @return
#' A numeric vector of a minimum and a maximum limit for the x axis.
#'
#' @keywords internal
#' @noRd
.validateXlim <- function(xlim, object, chromosomeName) {
    # A limit that is not a pair of numbers is discarded with a message.
    if (!is.null(xlim) && length(xlim) != 2) {
        message(
            "Expected a vector of two numbers but received '",
            paste(xlim, collapse = " "),
            "'. Setting xlim to NULL."
        )
        xlim <- NULL
    }
    if (is.null(xlim)) {
        # Fall back to the full span of the chromosome's regions.
        chromosomeRegions <- InteractionSet::regions(object)
        chromosomeRegions <- chromosomeRegions[
            GenomeInfoDb::seqnames(chromosomeRegions) == chromosomeName
        ]
        return(c(
            min(GenomicRanges::start(chromosomeRegions), na.rm = TRUE),
            max(GenomicRanges::end(chromosomeRegions), na.rm = TRUE)
        ))
    }
    # Valid user-provided limits are returned in increasing order.
    sort(xlim)
}
#' @description
#' Extract messages for the user after sanity checks
#'
#' @param object
#' A HiCDOCDataSet object.
#' @param chromosome
#' A chromosome name or index in chromosomes(object).
#'
#' @return
#' A character vector.
#'
#' @keywords internal
#' @noRd
.messageCheck <- function(object, chomosomeName) {
    # NOTE(review): the parameter keeps its original (misspelled) name,
    # since callers may pass it as a named argument.
    checks <- object@checks[chromosome == chomosomeName]
    # Start from the all-OK messages and downgrade each failed check.
    messagesChecks <- list(
        "PC1" = "Centroid PC1 inertia: OK",
        "centroids" = "A/B clustering consistency: OK",
        "assignment" = "A/B assignment reliability: OK"
    )
    if (!checks$PC1.check) {
        messagesChecks$PC1 <- "Centroid PC1 inertia: WARNING"
    }
    if (!checks$centroid.check) {
        messagesChecks$centroids <- "A/B clustering consistency: WARNING"
    }
    if (!checks$assignment.check) {
        messagesChecks$assignment <- "A/B assignment reliability: FAIL"
    }
    messagesChecks
}
#' @description
#' Complete the levels of replicates to get balanced condition x replicate
#' @param replicates
#' Vector of replicates for one condition
#' @param expectedLength
#' Expected length of replicates levels
#' @param condition
#' Name of the condition
#'
#' @return
#' A vector padded with fictitious levels if some are missing
#'
#' @keywords internal
#' @noRd
.completeLevels <- function(replicates, expectedLength, condition) {
    # Nothing to pad when there are already enough replicate levels.
    if (length(replicates) >= expectedLength) {
        return(replicates)
    }
    # Pad with artificial "R.<i>" levels, keeping the provided replicates
    # in their original positions.
    padded <- paste0("R.", seq(expectedLength))
    padded[seq(length(replicates))] <- replicates
    padded
}
#' @title
#' Compute PCA
#'
#' @description
#' Helper function that computes Principal Components of centroids.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#' @param chromosomeName
#' A chromosome name or index in \code{chromosomes(object)}.
#'
#' @return
#' A list, with a \code{data.table}, which contains the PCA,
#' and the variability explained by the 2 first axes.
#'
#' @keywords internal
#' @noRd
.computePca <- function(object, chromosomeName) {
    # Centroids of the requested chromosome only.
    df <- object@centroids[
        chromosome == chromosomeName,
        .(condition, compartment, centroid)
    ]
    if (nrow(df) == 0) {
        message("No centroids for chromosome ", chromosomeName, ".")
        return(NULL)
    }
    conditions <- df$condition
    compartments <- df$compartment
    # 'centroid' appears to be a list column; flatten each element and
    # stack them into a matrix with one row per centroid.
    df <- lapply(df$centroid, unlist)
    df <- do.call("rbind", df)
    pca <- stats::prcomp(df)
    # Proportion of the total variance explained by each component.
    varpca <- pca$sdev ^ 2
    propvar <- varpca / sum(varpca)
    # Return the coordinates annotated with condition and compartment.
    pca <- as.data.table(pca$x)
    pca[, condition := conditions]
    pca[, compartment := compartments]
    return(list(PCA = pca, propvar = propvar))
}
<file_sep>#' @description
#' Parses interactions in tabular format and fills the conditions, replicates,
#' and interactions slots of the provided \code{\link{InteractionSet}}.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#' @param input
#' The name of the input file
#' @param conditions
#' The names of the conditions
#' (inferred from the table if not specified).
#' @param replicates
#' The names of the replicates
#' (inferred from the table if not specified).
#'
#' @return
#' An \code{\link{InteractionSet}}.
#'
#' @keywords internal
#' @noRd
.setFromTabular <- function(tabular, conditions = NULL, replicates = NULL) {
    # --- Validate the expected column layout --------------------------------
    if (colnames(tabular)[1] != "chromosome") {
        stop(
            "First column of the input file must be named 'chromosome'.",
            call. = FALSE
        )
    }
    # Accept 'position.1' as an alias for 'position 1'.
    if (colnames(tabular)[2] == "position.1") {
        data.table::setnames(tabular, "position.1", "position 1")
    }
    if (colnames(tabular)[2] != "position 1") {
        stop(
            "Second column of the input file must be named 'position 1'.",
            call. = FALSE
        )
    }
    # Accept 'position.2' as an alias for 'position 2'.
    if (colnames(tabular)[3] == "position.2") {
        data.table::setnames(tabular, "position.2", "position 2")
    }
    if (colnames(tabular)[3] != "position 2") {
        stop(
            "Third column of the input file must be named 'position 2'.",
            call. = FALSE
        )
    }
    if (is.null(conditions) != is.null(replicates)) {
        stop(
            "Conditions and replicates should be both NULL, or none.",
            call. = FALSE
        )
    }
    # Chromosomes are ordered naturally (e.g. chr2 before chr10).
    tabular[, chromosome := as.character(chromosome)]
    tabular[, chromosome := factor(
        chromosome,
        levels = gtools::mixedsort(unique(chromosome)))
    ]
    setorder(tabular, chromosome, `position 1`, `position 2`)
    # Assays part, fill with NA
    assays <- as.matrix(tabular[,4:ncol(tabular), drop = FALSE])
    if (!is.null(conditions) | !is.null(replicates)) {
        if (
            (length(conditions) != ncol(assays)) |
            (length(replicates) != ncol(assays))
        ) {
            stop(
                "Number of conditions and replicates should match the number ",
                "of counts in the matrix.",
                call. = FALSE
            )
        }
    } else {
        # Without explicit conditions/replicates, count columns must be
        # named 'C.R' so both can be inferred from the header below.
        if (!all(grepl("^.+?\\..+$", colnames(assays)))) {
            stop(
                "Fourth to last column of the input file must be named 'C.R', ",
                "with C the condition number/name and R the replicate ",
                "number/name.",
                call. = FALSE
            )
        }
    }
    assays[assays == 0] <- NA
    # GInteraction part
    tabular <- tabular[, .(chromosome, `position 1`, `position 2`)]
    data.table::setnames(tabular, "position 1", "bin.1")
    data.table::setnames(tabular, "position 2", "bin.2")
    # Infer the bin size as the most frequent distance between the two
    # positions of off-diagonal interactions, then convert positions to
    # bin numbers.
    diagonal <- (tabular$bin.1 == tabular$bin.2)
    binSize <- .modeVector(abs(
        tabular[!diagonal,]$bin.1 - tabular[!diagonal,]$bin.2
    ))
    tabular[, bin.1 := bin.1/binSize]
    tabular[, bin.2 := bin.2/binSize]
    # All distinct (chromosome, bin) pairs appearing on either anchor.
    allRegions <- data.table::melt(
        tabular[, .(chromosome, bin.1, bin.2)],
        id.vars = "chromosome",
        value.name = "indexC"
    )
    allRegions[, variable := NULL]
    allRegions <- unique(allRegions)
    setorder(allRegions, chromosome, indexC)
    # Constructing unique index for all chromosomes,
    # taking into account the difference in bins.
    allRegions[
        ,
        index := indexC - data.table::shift(indexC, fill = 0),
        by = .(chromosome)
    ]
    allRegions[index==0, index:=1]
    allRegions[, index := cumsum(index)]
    # Genomic coordinates of each bin (1-based start, per GRanges).
    allRegions[, end := (indexC+1) * binSize]
    allRegions[, start := (indexC) * binSize + 1]
    data.table::setcolorder(
        allRegions,
        c("chromosome", "start", "end", "index", "indexC")
    )
    # Map each interaction's two bins to the global region indices.
    tabular <- data.table::merge.data.table(
        tabular,
        allRegions[, .(chromosome, startIndex = index, bin.1 = indexC)],
        all.x = TRUE,
        sort = FALSE,
        by = c("chromosome", "bin.1")
    )
    tabular <- data.table::merge.data.table(
        tabular,
        allRegions[, .(chromosome, stopIndex = index, bin.2 = indexC)],
        all.x = TRUE,
        sort = FALSE,
        by = c("chromosome", "bin.2")
    )
    tabular[, bin.1 := NULL]
    tabular[, bin.2 := NULL]
    allRegions[, indexC := NULL]
    # Anchor orders used to build the GInteractions object.
    order1 <- match(tabular$startIndex, allRegions$index)
    order2 <- match(tabular$stopIndex, allRegions$index)
    allRegions <- GenomicRanges::GRanges(allRegions)
    gi <- InteractionSet::GInteractions(
        allRegions[order1],
        allRegions[order2],
        regions = allRegions,
        mode="strict"
    )
    # Infer conditions/replicates from the 'C.R' column names if needed.
    if (is.null(conditions)) {
        conditions <- gsub("^(.+?)\\..+$", "\\1", colnames(assays))
    }
    if(is.null(replicates)) {
        replicates <- gsub("^.+?\\.(.+)$", "\\1", colnames(assays))
    }
    interactionSet <- .createInteractionSet(assays, gi, allRegions, conditions, replicates)
    return(interactionSet)
}
#' @description
#' Read the file, and fills it using \code{\link{.setFromTabular}}.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#' @param sep
#' The separator of the tabular file.
#'
#' @return
#' A filled \code{\link{HiCDOCDataSet}}.
#'
#' @keywords internal
#' @noRd
.parseTabular <- function(input, sep = "\t") {
    message("Parsing '", input, "'.")
    # Read the whole tabular file at once.
    tabularData <- data.table::fread(
        file = input,
        sep = sep,
        header = TRUE,
        check.names = FALSE,
        data.table = TRUE,
        stringsAsFactors = FALSE
    )
    # Build the InteractionSet and wrap it into a HiCDOCDataSet.
    new(
        "HiCDOCDataSet",
        .setFromTabular(tabularData),
        input = input
    )
}
#' @description
#' Parses a single interactions file in \code{.cool} or \code{.mcool} format.
#'
#' @param path
#' The path to the interactions file.
#' @param binSize
#' The resolution (span of each position in number of bases). Optionally
#' provided to select the appropriate resolution in \code{.mcool} files.
#' Defaults to NULL.
#'
#' @return
#' A data.table of interactions.
#'
#' @keywords internal
#' @noRd
.parseOneCool <- function(path, binSize = NA, replicate, condition) {
    message("\nParsing '", path, "'.")
    # For multi-resolution (.mcool) files, HDF5 datasets live under
    # 'resolutions/<binSize>/...'; plain .cool files use the path as is.
    uri <- function(path) {
        if (!is.numeric(binSize)) return(path)
        return(
            paste(
                "resolutions",
                format(binSize, scientific = FALSE),
                path,
                sep = "/"
            )
        )
    }
    # Genomic bins as stored in the cool file.
    bins <- data.table::data.table(
        chromosome = factor(
            rhdf5::h5read(file = path, name = uri("bins/chrom"))
        ),
        start = rhdf5::h5read(file = path, name = uri("bins/start")),
        end = rhdf5::h5read(file = path, name = uri("bins/end"))
    )
    bins[, start := as.integer(start)]
    # Shift starts by one to follow the 1-based GRanges convention.
    bins[, start := start+1]
    bins[, end := as.integer(end)]
    setorder(bins, chromosome, start, end)
    bins[, index := seq_len(nrow(bins))]
    # Pixel table: pairs of bin ids with their interaction counts.
    interactions <- data.table::data.table(
        id1 = rhdf5::h5read(file = path, name = uri("pixels/bin1_id")),
        id2 = rhdf5::h5read(file = path, name = uri("pixels/bin2_id")),
        interaction = rhdf5::h5read(file = path, name = uri("pixels/count"))
    )
    # Cool bin ids are 0-based; convert to the 1-based indices used above.
    interactions[, id1 := as.integer(id1) + 1]
    interactions[, id2 := as.integer(id2) + 1]
    interactions[, interaction := as.numeric(interaction)]
    order1 <- match(interactions$id1, bins$index)
    order2 <- match(interactions$id2, bins$index)
    allRegions <- GenomicRanges::GRanges(bins)
    # GInteractions part
    gi <- InteractionSet::GInteractions(
        allRegions[order1],
        allRegions[order2],
        regions = allRegions,
        mode="strict"
    )
    # Single-sample assay: one column of counts.
    assay <- as.matrix(interactions$interaction, ncol = 1)
    interactionSet <- .createInteractionSet(assay, gi, allRegions, condition, replicate)
    return(interactionSet)
}
#' @description
#' Parses interactions in \code{.cool} or \code{.mcool} format and fills the
#' interactions slot of the provided \code{\link{HiCDOCDataSet}}.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#' @param binSize
#' The resolution (span of each position in number of bases). Optionally
#' provided to select the appropriate resolution in \code{.mcool} files.
#' Defaults to NULL.
#'
#' @return
#' A filled \code{\link{HiCDOCDataSet}}.
#'
#' @keywords internal
#' @noRd
.parseCool <- function(object, binSize = NA, replicates, conditions) {
    # Parse every (m)cool input file, with a progress bar.
    parsedSets <- pbapply::pbmapply(
        .parseOneCool,
        path = object@input,
        binSize = binSize,
        condition = conditions,
        replicate = replicates
    )
    # Fold all per-file sets into a single merged InteractionSet.
    mergedSet <- Reduce(
        f = .mergeInteractionSet,
        x = parsedSets
    )
    new(
        "HiCDOCDataSet",
        mergedSet,
        input = object@input
    )
}
#' @description
#' Parses a single interactions file in \code{.hic} format. Calls the C++
#' \code{parseHiCFile} parser.
#'
#' @param path
#' The path to the interactions file.
#' @param binSize
#' The resolution (span of each position in number of bases) to select within
#' the \code{.hic} file.
#'
#' @return
#' An \code{\link{InteractionSet}}.
#'
#' @keywords internal
#' @noRd
.parseOneHiC <- function(path, binSize, condition, replicate) {
    message("\nParsing '", path, "'.")
    # The heavy lifting is done by the C++ parser.
    parsed <- parseHiCFile(path, binSize)
    # Rcpp returns a plain DataFrame; turn it into a proper data.table.
    parsed <- data.table::setalloccol(parsed)
    # Wrap the tabular data into an InteractionSet.
    .setFromTabular(parsed, condition, replicate)
}
#' @description
#' Parses interactions in \code{.hic} format and fills the interactions slots of
#' the provided \code{\link{HiCDOCDataSet}}.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#' @param binSize
#' The resolution (span of each position in number of bases) to select within
#' the \code{.hic} files.
#'
#' @return
#' A filled \code{\link{HiCDOCDataSet}}.
#'
#' @keywords internal
#' @noRd
.parseHiC <- function(object, binSize, replicates, conditions) {
    # Parse every .hic input file, with a progress bar.
    parsedSets <- pbapply::pbmapply(
        .parseOneHiC,
        path = object@input,
        binSize = binSize,
        condition = conditions,
        replicate = replicates
    )
    # Fold all per-file sets into a single merged InteractionSet.
    mergedSet <- Reduce(f = .mergeInteractionSet, x = parsedSets)
    new(
        "HiCDOCDataSet",
        mergedSet,
        input = object@input
    )
}
#' @description
#' Parses a single pair of \code{.matrix} and \code{.bed} files.
#'
#' @param matrixPath
#' The path to the interactions matrix file.
#' @param bedPath
#' The path to the bed file.
#'
#' @return
#' A data.table of interactions.
#'
#' @keywords internal
#' @noRd
.parseOneHiCPro <- function(matrixPath, bedPath, replicate, condition) {
    message("\nParsing '", matrixPath, "' and '", bedPath, "'.")
    # Sparse matrix file: bin index pairs and their interaction counts.
    interactions <- data.table::fread(
        matrixPath,
        header = FALSE,
        stringsAsFactors = FALSE,
        col.names = c("startIndex", "stopIndex", "interaction"),
        data.table = TRUE
    )
    # BED file describing the genomic interval of each bin index.
    bed <- data.table::fread(
        bedPath,
        header = FALSE,
        stringsAsFactors = FALSE,
        col.names = c("chromosome", "start", "end", "index"),
        data.table = TRUE
    )
    setorder(bed, chromosome, start, end)
    # Adding 1 to follow Bioconductor GRanges recommended format
    bed[,start := start+1]
    # Keeping only intra-chromosomal interactions
    # Add 1 if BED index start with 0
    # Lookup table from (possibly 0-based) bin index to chromosome name.
    allChromosomes <- vector("character", length = max(bed$index) + 1)
    allChromosomes[bed[,index]+1] <- bed[,chromosome]
    interactions <- interactions[
        allChromosomes[startIndex + 1] == allChromosomes[stopIndex + 1]]
    # Anchor orders used to build the GInteractions object.
    order1 <- match(interactions$startIndex, bed$index)
    order2 <- match(interactions$stopIndex, bed$index)
    allRegions <- GenomicRanges::GRanges(bed)
    gi <- InteractionSet::GInteractions(
        allRegions[order1],
        allRegions[order2],
        regions = allRegions,
        mode="strict"
    )
    # Single-sample assay: one column of counts.
    assay <- as.matrix(interactions$interaction, ncol=1)
    interactionSet <- .createInteractionSet(assay, gi, allRegions, condition, replicate)
    return(interactionSet)
}
#' @description
#' Parses interactions in pairs of \code{.matrix} and \code{.bed} files and
#' fills the interactions slots of the provided \code{\link{HiCDOCDataSet}}.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#'
#' @return
#' A filled \code{\link{HiCDOCDataSet}}.
#'
#' @keywords internal
#' @noRd
.parseHiCPro <- function(object, replicates, conditions) {
    # Each input entry is a (matrix file, bed file) pair.
    matrixPaths <- lapply(object@input, `[[`, 1)
    bedPaths <- lapply(object@input, `[[`, 2)
    # Parse every pair, with a progress bar.
    parsedSets <- pbapply::pbmapply(
        .parseOneHiCPro,
        matrixPaths,
        bedPaths,
        replicates,
        conditions
    )
    # Fold all per-file sets into a single merged InteractionSet.
    mergedSet <- Reduce(f = .mergeInteractionSet, x = parsedSets)
    new(
        "HiCDOCDataSet",
        mergedSet,
        input = object@input
    )
}
#' Create the object interactionSet to use in HiCDOCDataSet
#'
#' @param assay matrix of assays
#' @param gi GInteractions object
#' @param allRegions regions object
#' @param condition condition (for colData), vector of length 1 or more
#' @param replicate replicate (for colData), vector of length 1 or more
#'
#' @return an interactionSet object
#' @keywords internal
#' @noRd
.createInteractionSet <- function(assay, gi, allRegions, condition, replicate){
    # Regions that only appear off-diagonal get an explicit 0 interaction
    # on the diagonal, so the diagonal is always represented.
    ids <- InteractionSet::anchors(gi, id = TRUE)
    idsDiagonals <- ids$first[ids$first == ids$second]
    notPresent <- setdiff(unique(c(ids$first, ids$second)), idsDiagonals)
    nb <- length(notPresent)
    colnames(assay) <- NULL
    if(nb>0){
        notPresentRegions <- allRegions[match(notPresent, allRegions$index)]
        gi <- c(gi,
                InteractionSet::GInteractions(
                    notPresentRegions,
                    notPresentRegions,
                    regions = allRegions,
                    mode="strict"))
        # Bug fix: the previous as.matrix(rep(0, nb*ncol(assay)), ncol=...,
        # nrow=...) silently ignored the ncol/nrow arguments (as.matrix
        # passes them to '...'), producing a single-column matrix and
        # breaking rbind() for multi-sample assays. matrix() builds the
        # intended nb x ncol(assay) block of zeros.
        assay <- rbind(assay, matrix(0, nrow = nb, ncol = ncol(assay)))
    }
    interactionSet <- InteractionSet::InteractionSet(
        assays = assay,
        interactions = gi,
        colData=S4Vectors::DataFrame(
            "condition" = condition,
            "replicate" = replicate
        )
    )
    # Keep only intra-chromosomal interactions
    interactionSet <- interactionSet[InteractionSet::intrachr(interactionSet),]
    return(interactionSet)
}
<file_sep>#" Based on
#" https://github.com/dozmorovlab/HiCcompare/blob/master/R/KRnormalization.R
#"
#' @description
#' Applies the Knight-Ruiz balancing algorithm to transform the provided
#' matrix into a doubly stochastic matrix, with sum of each row and sum of each
#' column equal to 1.
#'
#' @param m
#' A matrix.
#'
#' @return
#' The transformed matrix.
#'
#' @keywords internal
#' @noRd
.normalizeKnightRuiz <- function(cm) {
    # cm carries the contact matrix in its @matrix slot; the balanced
    # matrix is written back into the same slot.
    m <- cm@matrix
    m[is.na(m)] <- 0
    # Algorithm constants: convergence tolerance and bounds on the inner
    # iterate y.
    tol <- 1e-6
    minDelta <- 0.1
    maxDelta <- 3
    n <- nrow(m)
    e <- matrix(1, nrow = n, ncol = 1)
    x0 <- e
    g <- 0.9
    etamax <- 0.1
    eta <- etamax
    stop_tol <- tol * .5
    x <- x0
    rt <- tol^2
    # v = x .* (M x); the residual rk measures how far the scaled row sums
    # are from 1.
    v <- x * (m %*% x)
    rk <- 1 - v
    rho_km1 <- drop(t(rk) %*% rk)
    rout <- rho_km1
    rold <- rout
    # Outer (Newton-like) iteration; each step is solved by an inner
    # conjugate-gradient loop.
    while (rout > rt) {
        k <- 0
        y <- e
        innertol <- max(c(eta^2 * rout, rt))
        while (rho_km1 > innertol) {
            k <- k + 1
            if (k == 1) {
                Z <- rk / v
                p <- Z
                rho_km1 <- drop(t(rk) %*% Z)
            } else {
                beta <- rho_km1 / rho_km2
                p <- Z + beta * p
            }
            w <- x * (m %*% (x * p)) + v * p
            if (max(w) == Inf) {
                # NOTE(review): on divergence a bare matrix is returned
                # instead of the cm object returned on success — callers
                # expecting cm may need the @matrix slot updated; confirm.
                warning("KR algorithm diverges.", call. = FALSE)
                return(t(t(x[, 1] * m) * x[, 1]))
            }
            alpha <- rho_km1 / drop(t(p) %*% w)
            ap <- alpha * p
            ynew <- y + ap
            # Keep the iterate inside [minDelta, maxDelta] by damping the
            # step and restarting the inner loop.
            if (min(ynew) <= minDelta) {
                if (minDelta == 0) break()
                ind <- which(ap < 0)
                gamma <- min((minDelta - y[ind]) / ap[ind])
                y <- y + gamma * ap
                break()
            }
            if (max(ynew) >= maxDelta) {
                ind <- which(ynew > maxDelta)
                gamma <- min((maxDelta - y[ind]) / ap[ind])
                y <- y + gamma * ap
                break()
            }
            y <- ynew
            rk <- rk - alpha * w
            rho_km2 <- rho_km1
            Z <- rk / v
            rho_km1 <- drop(t(rk) %*% Z)
        }
        # Update the scaling vector and recompute the outer residual.
        x <- x * y
        v <- x * (m %*% x)
        rk <- 1 - v
        rho_km1 <- drop(t(rk) %*% rk)
        rout <- rho_km1
        rat <- rout / rold
        rold <- rout
        res_norm <- sqrt(rout)
        # Adapt the inner-loop tolerance for the next outer iteration.
        eta_o <- eta
        eta <- g * rat
        if (g * eta_o^2 > 0.1) eta <- max(c(eta, g * eta_o^2))
        eta <- max(c(min(c(eta, etamax)), stop_tol / res_norm))
    }
    # Balanced matrix: diag(x) %*% m %*% diag(x).
    result <- t(t(x[, 1] * m) * x[, 1])
    cm@matrix <- result
    return(cm)
}
#' @description
#' Normalizes biological biases in the interactions of a given chromosome. Calls
#' \code{.normalizeKnightRuiz} internally.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#' @param chromosomeName
#' The name of a chromosome to normalize.
#'
#' @return
#' A data.table of normalized interactions.
#'
#' @keywords internal
#' @noRd
.normalizeBiologicalBiasesOfChromosome <- function(object) {
    # 'object' holds a single chromosome; its name is read from the first
    # row's metadata.
    chromosomeName <- as.character(
        SummarizedExperiment::mcols(object)$chromosome[1]
    )
    message("Chromosome ", chromosomeName, ": normalizing biological biases.")
    if (object@totalBins[[chromosomeName]] <= 0) {
        return(NULL)
    }
    # Remember the original anchor order so rows can be restored after the
    # inflate/deflate round trip below.
    currentOrder <- InteractionSet::anchorIds(object)
    currentAssay <- SummarizedExperiment::assay(object)
    # Pass by InteractionSet so we can use inflate/deflate
    chromosomeInteractionSet <- InteractionSet::InteractionSet(
        currentAssay,
        InteractionSet::interactions(object)
    )
    validAssay <- object@validAssay[[chromosomeName]]
    # Inflate each valid sample into a dense square contact matrix.
    matrices <- lapply(
        validAssay,
        FUN = function(x) {
            InteractionSet::inflate(
                chromosomeInteractionSet,
                rows = chromosomeName,
                columns = chromosomeName,
                sample = x,
                sparse = FALSE
            )
        }
    )
    # KR balancing needs numeric entries: NAs become 0 beforehand...
    matrices <- lapply(
        matrices,
        function(m) {
            m@matrix[is.na(m@matrix)] <- 0
            return(m)
        }
    )
    matrices <- lapply(matrices, .normalizeKnightRuiz)
    # ...and zeros are restored to NA afterwards.
    matrices <- lapply(
        matrices,
        function(m) {
            m@matrix[m@matrix == 0] <- NA
            return(m)
        }
    )
    matrices <- lapply(matrices, InteractionSet::deflate, use.na = TRUE)
    # Re-align the deflated rows with the original anchor order.
    ids <- InteractionSet::anchorIds(matrices[[1]])
    ids <- paste(ids$first, ids$second)
    correctIds <- paste(currentOrder$first, currentOrder$second)
    orderids <- match(correctIds, ids)
    matrices <- lapply(matrices, function(x) SummarizedExperiment::assay(x))
    matrices <- lapply(matrices, function(x) x[orderids, ])
    matrices <- do.call(base::"cbind", matrices)
    # Only the valid samples are overwritten; other columns are untouched.
    currentAssay[, validAssay] <- matrices
    return(currentAssay)
}
#' @title
#' Normalize biological biases.
#'
#' @description
#' Normalizes biological biases such as GC content and repeated regions. Uses
#' the Knight-Ruiz balancing algorithm to transform interaction matrices into
#' doubly stochastic matrices, with sum of each row and sum of each column equal
#' to 1.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#' @param parallel
#' Should the normalization be run in parallel mode? Default to FALSE.
#'
#' @return
#' A \code{\link{HiCDOCDataSet}} with normalized interactions.
#'
#' @examples
#' data(exampleHiCDOCDataSet)
#' object <- exampleHiCDOCDataSet
#' object <- filterSparseReplicates(object)
#' object <- filterWeakPositions(object)
#' object <- normalizeBiologicalBiases(object)
#'
#' @seealso
#' \code{\link{filterSparseReplicates}},
#' \code{\link{filterWeakPositions}},
#' \code{\link{normalizeTechnicalBiases}},
#' \code{\link{normalizeDistanceEffect}},
#' \code{\link{HiCDOC}}
#'
#' @export
normalizeBiologicalBiases <- function(object, parallel = FALSE) {
    # The per-chromosome normalization needs bin counts and valid assays.
    .validateSlots(object, slots = c("totalBins", "validAssay"))
    # Process each chromosome separately (possibly in parallel), then
    # stack the normalized assays back together.
    perChromosome <- S4Vectors::split(
        object,
        SummarizedExperiment::mcols(object)$chromosome,
        drop = FALSE
    )
    normalizedAssays <- .internalLapply(
        parallel,
        perChromosome,
        FUN = .normalizeBiologicalBiasesOfChromosome
    )
    SummarizedExperiment::assay(object) <- do.call("rbind", normalizedAssays)
    return(object)
}
<file_sep>test_that("HiCDOCDataSetFromPro works as expected", {
    # Path to each matrix file
    matrixPaths <-
        system.file("extdata", "liver_18_10M_500000.matrix", package = "HiCDOC")
    # Path to each bed file
    bedPaths <-
        system.file("extdata", "liver_18_10M_500000.bed", package = "HiCDOC")
    # Replicate and condition of each file. Can be names instead of numbers.
    replicates <- 1
    conditions <- 1
    # Instantiation of data set
    # Parsing should mention the matrix file name in its progress message.
    expect_message(
        object <- HiCDOCDataSetFromHiCPro(
            matrixPaths = matrixPaths,
            bedPaths = bedPaths,
            replicates = replicates,
            conditions = conditions
        ),
        "liver_18_10M_500000.matrix")
    # The example file yields 210 interactions, all on chromosome 18.
    expect_equal(length(object), 210)
    expect_identical(object@chromosomes, "18")
    expect_identical(object$condition, 1)
    expect_identical(object$replicate, 1)
})
<file_sep>#' @description
#' Removes sparse replicates of a given chromosome.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#' @param chromosomeName
#' The name of a chromosome.
#' @param threshold
#' The minimum percentage of non-zero interactions for a replicate to be kept.
#'
#' @return
#' A list of:
#' - The sparse condition names repeated along the sparse replicates.
#' - The sparse replicate names repeated along the sparse conditions.
#' - The filtered interactions.
#'
#' @keywords internal
#' @noRd
.filterSparseReplicatesOfChromosome <- function(
    assay,
    diagonal,
    chromosomeName,
    totalBins,
    threshold,
    validAssay,
    conditions,
    replicates
) {
    # Weighted count of non-zero cells per column: 'diagonal' holds weight
    # 2 for off-diagonal rows and 1 for diagonal rows (matrix symmetry),
    # so the column sums count the filled cells of the full square matrix.
    filledAssay <- diagonal * (!is.na(assay) & assay > 0)
    filledPercentage <- colSums(filledAssay) / (totalBins * totalBins)
    toRemove <- which(filledPercentage < threshold)
    # Only replicates still considered valid can be removed.
    toRemove <- toRemove[toRemove %in% validAssay]
    if (length(toRemove) > 0) {
        message(
            paste(
                "\nRemoved interactions matrix of chromosome ",
                chromosomeName,
                ", condition ",
                conditions[toRemove],
                ", replicate ",
                replicates[toRemove],
                " filled at ",
                round(filledPercentage[toRemove], digits = 5) * 100,
                "%.",
                collapse = "\n",
                sep = ""
            )
        )
    }
    # Removed replicates are masked with NA rather than dropped.
    assay[, toRemove] <- NA
    return(assay)
}
#' @title
#' Filter sparse replicates.
#'
#' @description
#' Removes chromosome replicates whose percentage of non-zero interactions is
#' smaller than the threshold.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#' @param threshold
#' The minimum percentage of non-zero interactions for a chromosome replicate to
#' be kept. If a chromosome replicate's percentage of non-zero interactions is
#' lower than this value, it is removed. Defaults to
#' \code{object$smallChromosomeThreshold} which is originally set to
#' \code{defaultHiCDOCParameters$smallChromosomeThreshold = 30\%}.
#'
#' @return
#' A filtered \code{\link{HiCDOCDataSet}}.
#'
#' @seealso
#' \code{\link{filterSmallChromosomes}},
#' \code{\link{filterWeakPositions}},
#' \code{\link{HiCDOC}}
#'
#' @examples
#' data(exampleHiCDOCDataSet)
#' object <- exampleHiCDOCDataSet
#'
#' object <- filterSparseReplicates(object)
#'
#' @export
filterSparseReplicates <- function(object, threshold = NULL) {
    .validateSlots(
        object,
        slots = c(
            "chromosomes",
            "parameters"
        )
    )
    # An explicit threshold overrides the stored parameter.
    if (!is.null(threshold)) {
        object@parameters$sparseReplicateThreshold <- threshold
    }
    object@parameters <- .validateParameters(object@parameters)
    threshold <- object@parameters$sparseReplicateThreshold
    message(
        "Keeping replicates filled with at least ",
        threshold * 100,
        "% non-zero interactions."
    )
    # Off-diagonal interactions count twice (the matrix is symmetric),
    # diagonal ones once: weight = 2 - 1 * isDiagonal.
    diagonal <- InteractionSet::anchors(object)
    diagonal <- diagonal$first == diagonal$second
    diagonal <- 2 - 1 * diagonal
    diagonals <- S4Vectors::split(
        diagonal,
        SummarizedExperiment::mcols(object)$chromosome,
        drop = FALSE
    )
    chromosomeAssays <- S4Vectors::split(
        SummarizedExperiment::assay(object),
        SummarizedExperiment::mcols(object)$chromosome,
        drop = FALSE
    )
    # Filter each chromosome's assay separately, with a progress bar.
    resultAssay <- pbapply::pbmapply(
        function(a, d, c, t, v) {
            .filterSparseReplicatesOfChromosome(
                a, d, c, t, threshold, v, object$condition, object$replicate
            )
        },
        chromosomeAssays,
        diagonals,
        object@chromosomes,
        object@totalBins,
        object@validAssay,
        SIMPLIFY = FALSE
    )
    resultAssay <- do.call("rbind", resultAssay)
    if (nrow(resultAssay) != nrow(object)) {
        stop("Something went wrong")
    }
    SummarizedExperiment::assay(object) <- resultAssay
    # Recompute which assays remain valid per chromosome.
    newValidAssay <- .determineValids(object)
    badChromosomes <- vapply(
        newValidAssay,
        function(x) length(x) == 0,
        FUN.VALUE = TRUE
    )
    newValidAssay[badChromosomes] <- list(NULL)
    totalRemovedReplicates <- (
        length(unlist(object@validAssay)) - length(unlist(newValidAssay))
    )
    object@validAssay <- newValidAssay
    # Drop interaction rows that became entirely empty after filtering.
    rowsToSuppress <- (
        rowSums(SummarizedExperiment::assay(object), na.rm = TRUE) == 0
    )
    if (sum(rowsToSuppress) > 0) {
        object <- object[!rowsToSuppress, ]
        object <- InteractionSet::reduceRegions(object)
        # Remove empty chromosomes
        # (Fix: removed a leftover debug print() of leftChromosomes.)
        if (sum(badChromosomes) > 0) {
            leftChromosomes <- object@chromosomes[!badChromosomes]
            object <- reduceHiCDOCDataSet(object, chromosomes = leftChromosomes)
        }
    }
    message(
        "Removed ",
        totalRemovedReplicates,
        " replicate",
        if (totalRemovedReplicates != 1) "s",
        " in total."
    )
    if (nrow(object) == 0) {
        warning("No data left!", call. = FALSE)
    }
    return(object)
}
<file_sep>test_that("plotCentroids returns error if no centroids", {
    data(exampleHiCDOCDataSet)
    # The raw example data set has not been processed, so no compartments
    # (and therefore no centroids) are available yet.
    expect_error(
        pp <- plotCentroids(exampleHiCDOCDataSet),
        "No compartments found."
    )
})
test_that("plotCentroids behaves as expected", {
    data(exampleHiCDOCDataSetProcessed)
    # The chromosome argument is mandatory and must be a known chromosome.
    expect_error(plotCentroids(exampleHiCDOCDataSetProcessed),
        "argument \"chromosome\"")
    expect_error(plotCentroids(exampleHiCDOCDataSetProcessed, 5), "Unknown")
    pp <- plotCentroids(exampleHiCDOCDataSetProcessed, 1)
    expect_is(pp, "ggplot")
    # Labels carry the quality-control caption and the variance explained
    # by the two PCA axes.
    expect_identical(
        unlist(pp$labels),
        c("caption" = "Quality controls:\nCentroid PC1 inertia: OK\nA/B clustering consistency: OK",
            "x" = "PC1 91.19 %",
            "y" = "PC2 6.82 %",
            "title" = "PCA on centroids of chromosome X",
            "colour" = "compartment",
            "shape" = "condition"
        )
    )
    expect_is(pp$layers[[1]]$geom, "GeomPoint")
    # No error when printed
    expect_error(print(pp), NA)
})
<file_sep>// Generated by using Rcpp::compileAttributes() -> do not edit by hand
// Generator token: <PASSWORD>
#include <Rcpp.h>
using namespace Rcpp;
#ifdef RCPP_USE_GLOBAL_ROSTREAM
Rcpp::Rostream<true>& Rcpp::Rcout = Rcpp::Rcpp_cout_get();
Rcpp::Rostream<false>& Rcpp::Rcerr = Rcpp::Rcpp_cerr_get();
#endif
// constrainedClustering
// Auto-generated Rcpp glue: unwraps the SEXP arguments coming from R,
// forwards them to the C++ constrainedClustering implementation, and
// wraps the result back into an R object.
List constrainedClustering(NumericMatrix& rMatrix, IntegerMatrix& rLinks, double maxDelta, int maxIterations, int totalRestarts, int totalClusters);
RcppExport SEXP _HiCDOC_constrainedClustering(SEXP rMatrixSEXP, SEXP rLinksSEXP, SEXP maxDeltaSEXP, SEXP maxIterationsSEXP, SEXP totalRestartsSEXP, SEXP totalClustersSEXP) {
BEGIN_RCPP
    Rcpp::RObject rcpp_result_gen;
    // Keeps R's random-number-generator state consistent across the call.
    Rcpp::RNGScope rcpp_rngScope_gen;
    Rcpp::traits::input_parameter< NumericMatrix& >::type rMatrix(rMatrixSEXP);
    Rcpp::traits::input_parameter< IntegerMatrix& >::type rLinks(rLinksSEXP);
    Rcpp::traits::input_parameter< double >::type maxDelta(maxDeltaSEXP);
    Rcpp::traits::input_parameter< int >::type maxIterations(maxIterationsSEXP);
    Rcpp::traits::input_parameter< int >::type totalRestarts(totalRestartsSEXP);
    Rcpp::traits::input_parameter< int >::type totalClusters(totalClustersSEXP);
    rcpp_result_gen = Rcpp::wrap(constrainedClustering(rMatrix, rLinks, maxDelta, maxIterations, totalRestarts, totalClusters));
    return rcpp_result_gen;
END_RCPP
}
// parseHiCFile
// Auto-generated Rcpp glue for the .hic file parser: converts the file
// name and resolution from R, calls the C++ parser, and wraps the
// resulting DataFrame for R.
DataFrame parseHiCFile(std::string& fname, int resolution);
RcppExport SEXP _HiCDOC_parseHiCFile(SEXP fnameSEXP, SEXP resolutionSEXP) {
BEGIN_RCPP
    Rcpp::RObject rcpp_result_gen;
    Rcpp::RNGScope rcpp_rngScope_gen;
    Rcpp::traits::input_parameter< std::string& >::type fname(fnameSEXP);
    Rcpp::traits::input_parameter< int >::type resolution(resolutionSEXP);
    rcpp_result_gen = Rcpp::wrap(parseHiCFile(fname, resolution));
    return rcpp_result_gen;
END_RCPP
}
// Registration table mapping the R symbol names to the C entry points
// above, with their argument counts.
static const R_CallMethodDef CallEntries[] = {
    {"_HiCDOC_constrainedClustering", (DL_FUNC) &_HiCDOC_constrainedClustering, 6},
    {"_HiCDOC_parseHiCFile", (DL_FUNC) &_HiCDOC_parseHiCFile, 2},
    {NULL, NULL, 0}
};
// Called by R when the HiCDOC shared library is loaded: registers the
// routines and disables dynamic symbol lookup.
RcppExport void R_init_HiCDOC(DllInfo *dll) {
    R_registerRoutines(dll, NULL, CallEntries, NULL, NULL);
    R_useDynamicSymbols(dll, FALSE);
}
<file_sep>#' @title
#' Example HiCDOCDataSet.
#'
#' @description
#' A S4 HiCDOCDataSet object with 4 chromosomes, 3 conditions and 3 replicates.
#'
#' @format
#' S4 HiCDOCDataSet object with the following characteristics:
#' \describe{
#' \item{chromosomes}{4 chromosomes: W, X, Y, Z}
#' \item{conditions}{3 conditions: 1, 2, 3}
#' \item{replicates}{3 replicates: R1, R2, R3}
#' \item{binSize}{A resolution of 137 bases}
#' }
#'
#' @usage data(exampleHiCDOCDataSet)
#'
#' @return
#' A \code{\link{HiCDOCDataSet}}.
#'
#' @examples
#' data(exampleHiCDOCDataSet)
#' exampleHiCDOCDataSet
"exampleHiCDOCDataSet"
#' @title
#' Example HiCDOCDataSet, filtered, normalized and with compartments detected.
#'
#' @description
#' A S4 HiCDOCDataSet object with 3 chromosomes, 3 conditions and 3 replicates.
#' Can be retrieved by running :
#' \code{data(exampleHiCDOCDataSet);
#' set.seed(123);
#' exampleHiCDOCDataSetProcessed <- HiCDOC(exampleHiCDOCDataSet)}
#'
#' @format
#' S4 HiCDOCDataSet object with the following characteristics:
#' \describe{
#' \item{chromosomes}{3 chromosomes: X, Y, Z}
#' \item{conditions}{3 conditions: 1, 2, 3}
#' \item{replicates}{3 replicates: R1, R2, R3}
#' \item{binSize}{A resolution of 137 bases}
#' }
#'
#' @usage data(exampleHiCDOCDataSetProcessed)
#'
#' @return
#' A \code{\link{HiCDOCDataSet}}, already filtered and normalized.
#'
#' @examples
#' data(exampleHiCDOCDataSetProcessed)
#' exampleHiCDOCDataSetProcessed
"exampleHiCDOCDataSetProcessed"
<file_sep>#' @title
#' Filter small chromosomes.
#'
#' @description
#' Removes chromosomes whose length (in number of positions) is smaller than the
#' threshold.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#' @param threshold
#' The minimum length (number of positions) for a chromosome to be kept.
#' Defaults to \code{object$smallChromosomeThreshold} which is originally set to
#' \code{defaultHiCDOCParameters$smallChromosomeThreshold} = 100.
#'
#' @return
#' A filtered \code{\link{HiCDOCDataSet}}.
#'
#' @seealso
#' \code{\link{filterSparseReplicates}},
#' \code{\link{filterWeakPositions}},
#' \code{\link{HiCDOC}}
#'
#' @examples
#' data(exampleHiCDOCDataSet)
#' object <- exampleHiCDOCDataSet
#'
#' chromosomes(object)
#' object <- filterSmallChromosomes(object)
#' chromosomes(object)
#'
#' @export
filterSmallChromosomes <- function(object, threshold = NULL) {
    # The object must carry chromosome metadata before filtering.
    .validateSlots(
        object,
        slots = c("chromosomes", "totalBins", "parameters")
    )
    # A user-supplied threshold overrides the stored parameter.
    if (!is.null(threshold)) {
        object@parameters$smallChromosomeThreshold <- threshold
    }
    object@parameters <- .validateParameters(object@parameters)
    threshold <- object@parameters$smallChromosomeThreshold
    message(
        "Keeping chromosomes with at least ",
        threshold,
        " position",
        if (threshold != 1) "s" else "",
        "."
    )
    # Flag each chromosome whose number of bins reaches the threshold.
    isLargeEnough <- vapply(
        object@totalBins,
        function(bins) bins >= threshold,
        logical(1)
    )
    keptNames <- gtools::mixedsort(names(isLargeEnough)[isLargeEnough])
    removedNames <- object@chromosomes[
        !(object@chromosomes %in% keptNames)
    ]
    object <- reduceHiCDOCDataSet(
        object,
        chromosomes = keptNames,
        dropLevels = TRUE
    )
    message(
        "Kept ",
        length(keptNames),
        " chromosome",
        if (length(keptNames) != 1) "s" else "",
        if (length(keptNames) > 0) ": " else ".",
        paste(keptNames, collapse = ", ")
    )
    message(
        "Removed ",
        length(removedNames),
        " chromosome",
        if (length(removedNames) != 1) "s" else "",
        if (length(removedNames) > 0) ": " else ".",
        paste(removedNames, collapse = ", ")
    )
    if (length(keptNames) == 0) {
        warning("No data left!", call. = FALSE)
    }
    return(object)
}
<file_sep>test_that("plotConcordanceDifferences returns an error of no compartments", {
data(exampleHiCDOCDataSet)
expect_error(
pp <- plotConcordanceDifferences(exampleHiCDOCDataSet),
"Missing slots: comparisons"
)
})
# Checks the plot built from a processed dataset: class, labels, and
# printability. expect_error(..., NA) asserts that NO error is raised.
test_that("plotConcordanceDifferences behaves as expected", {
    data(exampleHiCDOCDataSetProcessed)
    expect_error(
        pp <- plotConcordanceDifferences(exampleHiCDOCDataSetProcessed),
        NA
    )
    expect_is(pp, "ggplot")
    expect_identical(
        unlist(pp$labels),
        c(
            "x" = "Concordance",
            "fill" = "Change\nof\ncompartment",
            "title" = "Distribution of concordance differences",
            "y" = "count",
            "weight" = "weight"
        )
    )
    expect_is(pp$layers[[1]]$geom, "GeomBar")
    # No error when printed
    expect_error(print(pp), NA)
})
<file_sep>#' @title
#' Normalize technical biases.
#'
#' @description
#' Normalizes technical biases such as sequencing depth by using a cyclic loess
#' to recursively normalize each pair of interaction matrices. Depends on
#' \code{multiHiCcompare}.
#'
#' @details
#' \subsection{Parallel processing}{
#' If \code{parallel = TRUE}, the function
#' \code{\link[multiHiCcompare]{cyclic_loess}}
#' is launched in parallel mode, using \code{\link[BiocParallel]{bplapply}}
#' function. Before to call the function in parallel you should specify
#' the parallel parameters such as:
#' \itemize{
#' \item{On Linux:
#'
#' \code{multiParam <- BiocParallel::MulticoreParam(workers = 10)}
#' }
#' \item{On Windows:
#'
#' \code{multiParam <- BiocParallel::SnowParam(workers = 10)}
#' }
#' }
#' And then you can register the parameters to be used by BiocParallel:
#'
#' \code{BiocParallel::register(multiParam, default = TRUE)}
#' }
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#' @param parallel
#' Logical. Whether or not to parallelize the processing. Defaults to FALSE
#' @param cyclicLoessSpan
#' A numeric value in between 0 and 1. The span for cyclic loess normalization.
#' This value is passed to \code{multiHiCcompare::cyclic_loess}.
#' Defaults to NULL, NULL indicates that the value of
#' parameters(object)$cyclicLoessSpan will be used.
#' If this value is NA, the span will be automatically calculated using
#' generalized cross validation. **For large dataset, it is highly recommended
#' to set this value to reduce computing time and necessary memory.**
#'
#' @return
#' A \code{\link{HiCDOCDataSet}} with normalized interactions.
#'
#' @examples
#' data(exampleHiCDOCDataSet)
#' object <- filterSmallChromosomes(exampleHiCDOCDataSet)
#' object <- filterSparseReplicates(object)
#' object <- filterWeakPositions(object)
#' # Not printing loess warnings for example purpose.
#' # Results should be inspected if there is any.
#' suppressWarnings(
#' object <- normalizeTechnicalBiases(object)
#' )
#'
#'
#' @seealso
#' \code{\link{filterSparseReplicates}},
#' \code{\link{filterWeakPositions}},
#' \code{\link{normalizeBiologicalBiases}},
#' \code{\link{normalizeDistanceEffect}},
#' \code{\link{HiCDOC}}
#'
#' @export
normalizeTechnicalBiases <-
    function(object, parallel = FALSE, cyclicLoessSpan = NULL) {
        message("Normalizing technical biases.")
        # A user-supplied span overrides the stored parameter.
        if (!is.null(cyclicLoessSpan)) {
            object@parameters$cyclicLoessSpan <- cyclicLoessSpan
        }
        # Build the (chromosome, region1, region2) key table expected by
        # multiHiCcompare, with chromosomes encoded as numbers.
        hic_table <- as.data.table(InteractionSet::interactions(object))
        hic_table <- hic_table[, .(
            chromosome = seqnames1,
            region1 = start1,
            region2 = start2
        )]
        if (!is.factor(hic_table$chromosome)) {
            hic_table[, chromosome := as.factor(chromosome)]
        }
        hic_table[, chromosome := as.numeric(chromosome)]
        # Missing values are encoded as 0 for multiHiCcompare, and turned
        # back into NAs at the end of this function.
        currentAssay <- SummarizedExperiment::assay(object)
        currentAssay[is.na(currentAssay)] <- 0
        # Reordering columns in condition order
        refOrder <- paste(object$condition, object$replicate, sep = ".")
        currentAssay <- currentAssay[, order(refOrder), drop=FALSE]
        # One table per replicate: the key columns plus its counts.
        table_list <- lapply(
            seq_len(ncol(currentAssay)),
            function(x) cbind(hic_table, currentAssay[, x])
        )
        experiment <- multiHiCcompare::make_hicexp(
            data_list = table_list,
            groups = sort(object$condition),
            remove_zeros = FALSE,
            filter = TRUE,
            zero.p = 1,
            A.min = 0,
            remove.regions = NULL
        )
        normalized <-
            multiHiCcompare::cyclic_loess(
                experiment,
                parallel = parallel,
                span = object@parameters$cyclicLoessSpan
            )
        normalized <- multiHiCcompare::hic_table(normalized)
        data.table::setnames(normalized, "chr", "chromosome")
        # Re-sorting the rows in the same order as original
        data.table::setindexv(normalized, c("chromosome", "region1", "region2"))
        data.table::setindexv(hic_table, c("chromosome", "region1", "region2"))
        hic_table <- data.table::merge.data.table(
            hic_table,
            normalized,
            sort = FALSE
        )
        # Columns 5+ are taken as the normalized counts; columns 1-4 are
        # presumably the merge keys plus the extra column returned by
        # multiHiCcompare::hic_table -- TODO confirm against that output.
        currentAssay <- as.matrix(hic_table[, 5:ncol(hic_table)])
        # Reordering columns in original order
        currentAssay <- currentAssay[, match(refOrder, sort(refOrder))]
        colnames(currentAssay) <- NULL
        currentAssay[currentAssay == 0] <- NA
        SummarizedExperiment::assay(object, withDimnames = FALSE) <- currentAssay
        return(object)
    }
<file_sep>data(exampleHiCDOCDataSet)
test_that("filterSmallChromosomes behave as expected with default filter", {
# No filter on the example dataset
expect_message(
object <- filterSmallChromosomes(exampleHiCDOCDataSet),
"Keeping chromosomes with at least 100 positions."
)
expect_equal(length(object@chromosomes), 3)
expect_equal(nrow(object), 40240)
assay <- SummarizedExperiment::assay(object)
expect_equal(sum(!is.na(assay)), 166150)
expect_equal(colSums(assay, na.rm=TRUE),
c(7891910, 20681568, 7810636, 28374121,
15745554, 21976428, 21352885))
})
# A higher threshold (161 positions) keeps only chromosome Z, and the
# threshold is persisted in the object's parameters.
test_that("filterSmallChromosomes behave as expected with custom filter", {
    # Filter on 1 chromosome
    expect_message(
        object <- filterSmallChromosomes(exampleHiCDOCDataSet, 161),
        "Keeping chromosomes with at least 161 positions."
    )
    expect_identical(object@chromosomes, "Z")
    expect_equal(nrow(object), 20100)
    expect_identical(object@parameters$smallChromosomeThreshold, 161)
    assay <- SummarizedExperiment::assay(object)
    expect_equal(sum(!is.na(assay)), 40200)
    expect_equal(colSums(assay, na.rm=TRUE),
        c(0, 0, 0, 22706459,
            0, 0, 7635309))
})
<file_sep>test_that("plotCompartments returns error if no compartments", {
data(exampleHiCDOCDataSet)
expect_error(
pp <- plotCompartments(exampleHiCDOCDataSet),
"No compartments found."
)
})
test_that("plotCompartments behaves as expected", {
data(exampleHiCDOCDataSetProcessed)
expect_error(plotCompartments(exampleHiCDOCDataSetProcessed),
"argument \"chromosome\"")
expect_error(plotCompartments(exampleHiCDOCDataSetProcessed, 5), "Unknown")
pp <- plotCompartments(exampleHiCDOCDataSetProcessed, 1)
expect_is(pp, "ggplot")
expect_identical(
unlist(pp$labels),
c("title" = "Compartments of chromosome X by condition",
"x" = "position",
"fill" = "compartment",
"y" = "count",
"weight" = "weight"
)
)
expect_is(pp$layers[[1]]$geom, "GeomBar")
# No error when printed
expect_error(print(pp), NA)
})
<file_sep>test_that("plotInteractions behaves as expected", {
data(exampleHiCDOCDataSet)
object <- reduceHiCDOCDataSet(exampleHiCDOCDataSet, chromosomes = c("X", "Y"))
expect_error(plotInteractions(object, 3), "Unknown chromosome")
expect_error(plotInteractions(object), '"chromosome"')
pp <- plotInteractions(object, 1)
expect_is(pp, "ggplot")
expect_equal(pp$labels$title, "Chromosome X")
expect_equal(pp$labels$x, "")
expect_equal(pp$labels$y, "")
expect_equal(pp$labels$z, "interaction")
expect_equal(pp$labels$fill, "interaction")
# No error when printed
expect_error(print(pp), NA)
})
<file_sep># code inspired from multiHiCcompare package (fonction smartApply)
.internalLapply <- function(parallel, ...) {
if (parallel) {
if (!is.null(BiocParallel::bpparam()$RNGseed)) {
warning(
"The use of RNGseed may not be ensured ",
"See ?detectCompartments for more details",
call. = FALSE
)
}
BiocParallel::bplapply(..., BPPARAM = BiocParallel::bpparam())
} else {
pbapply::pblapply(...)
}
}
<file_sep>test_that("HiCDOCDataSetFromCool works as expected", {
paths <-
system.file("extdata", "liver_18_10M_500000.cool", package = "HiCDOC")
# Replicate and condition of each file. Can be names instead of numbers.
replicates <- "GG"
conditions <- "AA"
# Instantiation of data set
expect_message(
object <- HiCDOCDataSetFromCool(
paths,
replicates = replicates,
conditions = conditions
),
"liver_18_10M_500000.cool'")
matAssay <- SummarizedExperiment::assay(object)
expect_equal(dim(matAssay), c(210, 1))
expect_identical(object@chromosomes, "18")
expect_identical(object$replicate, c("GG"))
expect_identical(object$condition, c("AA"))
})
test_that("HiCDOCDataSetFromCool works as expected if mcool", {
paths <-
system.file("extdata", "liver_18_10M.mcool", package = "HiCDOC")
# Replicate and condition of each file. Can be names instead of numbers.
replicates <- "A"
conditions <- "C"
# Resolution to select in .mcool files
binSize <- 500000
# Instantiation of data set
expect_message(
object <- HiCDOCDataSetFromCool(
paths,
replicates = replicates,
conditions = conditions,
binSize = binSize
),
"liver_18_10M.mcool")
matAssay <- SummarizedExperiment::assay(object)
expect_equal(dim(matAssay), c(210, 1))
expect_identical(object@chromosomes, "18")
expect_identical(object$replicate, c("A"))
expect_identical(object$condition, c("C"))
})
<file_sep>data("exampleHiCDOCDataSet")
data("exampleHiCDOCDataSetProcessed")
test_chromosome_levels <- function(object, expectedLevels){
expect_identical(levels(object@distances$chromosome), expectedLevels)
expect_identical(levels(object@selfInteractionRatios$chromosome), expectedLevels)
expect_identical(seqlevels(object@compartments), expectedLevels)
expect_identical(seqlevels(object@concordances), expectedLevels)
expect_identical(seqlevels(object@differences), expectedLevels)
expect_identical(levels(object@centroids$chromosome), expectedLevels)
}
test_condition_levels <- function(object, expectedLevels){
expect_identical(levels(object@distances$condition), expectedLevels)
expect_identical(levels(object@selfInteractionRatios$condition),
expectedLevels)
expect_identical(levels(object@compartments$condition), expectedLevels)
expect_identical(levels(object@concordances$condition), expectedLevels)
expect_identical(levels(object@centroids$condition), expectedLevels)
}
test_replicate_levels <- function(object, expectedLevels){
expect_identical(levels(object@distances$replicate), expectedLevels)
expect_identical(levels(object@selfInteractionRatios$replicate),
expectedLevels)
expect_identical(levels(object@concordances$replicate), expectedLevels)
}
# Unknown chromosome, condition or replicate selectors must be rejected.
test_that("reduceHiCDOCDataSet return correct errors", {
    # On chromosomes
    expect_error(
        reduceHiCDOCDataSet(exampleHiCDOCDataSet, chromosomes = c(5, 6)),
        "Unknown chromosomes"
    )
    expect_error(
        reduceHiCDOCDataSet(exampleHiCDOCDataSet, chromosomes = "chr1"),
        "Unknown chromosome"
    )
    # On conditions
    expect_error(
        reduceHiCDOCDataSet(exampleHiCDOCDataSet, conditions = c(3, 4)),
        "Unknown condition: 4"
    )
    expect_error(
        reduceHiCDOCDataSet(exampleHiCDOCDataSet, conditions = "cond1"),
        "Unknown condition"
    )
    # On replicates
    expect_error(
        reduceHiCDOCDataSet(exampleHiCDOCDataSet, replicates = c(3, 4)),
        "Unknown replicates"
    )
    expect_error(
        reduceHiCDOCDataSet(exampleHiCDOCDataSet, replicates = "rep1"),
        "Unknown replicate"
    )
})
# Selecting one chromosome with dropLevels: the unused chromosome levels
# are dropped everywhere, but conditions and replicates are untouched.
test_that("reduceHiCDOCDataSet works if select chromosome, dropLevels", {
    expect_warning(
        objectRed <- reduceHiCDOCDataSet(exampleHiCDOCDataSetProcessed,
            chromosomes = "X"),
        "You should not reduce a HiCDOCDataSet after"
    )
    # Chromosomes
    expect_identical(objectRed@chromosomes, "X")
    expect_identical(objectRed@totalBins, c("X" = 120))
    expect_identical(objectRed@weakBins, list("X" = c(171, 200)))
    # Doesn't remove replicates & conditions
    expect_identical(objectRed$replicate,
        c("R2", "R1", "R1", "R2", "R2", "R1", "R3"))
    expect_identical(objectRed$condition, c("2", "1", "2", "1", "3", "3", "1"))
    # Interactions
    expect_equal(dim(SummarizedExperiment::assay(objectRed)), c(7021, 7))
    expect_equal(round(sum(SummarizedExperiment::assay(objectRed), na.rm=TRUE),2),
        36.77)
    # Objects produced by detectCompartments
    test_chromosome_levels(objectRed, "X")
    test_condition_levels(objectRed, c("1", "2", "3"))
    test_replicate_levels(objectRed, c("R1", "R2", "R3"))
})
# Same selection with dropLevels = FALSE: the factor levels of the removed
# chromosomes are kept in the detectCompartments slots.
test_that("reduceHiCDOCDataSet works if select chromosome, keep levels", {
    expect_warning(
        objectRed <- reduceHiCDOCDataSet(
            exampleHiCDOCDataSetProcessed,
            chromosomes = "X",
            dropLevels = FALSE
        ),
        "You should not reduce a HiCDOCDataSet after"
    )
    # Chromosomes
    expect_identical(objectRed@chromosomes, "X")
    expect_identical(objectRed@totalBins, c("X" = 120))
    expect_identical(objectRed@weakBins, list("X" = c(171, 200)))
    # Doesn't remove replicates & conditions
    expect_identical(objectRed$replicate, exampleHiCDOCDataSetProcessed$replicate)
    expect_identical(objectRed$condition, exampleHiCDOCDataSetProcessed$condition)
    # Interactions
    expect_equal(dim(SummarizedExperiment::assay(objectRed)), c(7021, 7))
    expect_equal(round(sum(SummarizedExperiment::assay(objectRed), na.rm=TRUE),2),
        36.77)
    # Objects produced by detectCompartments
    test_chromosome_levels(objectRed, c("X", "Y", "Z"))
    test_condition_levels(objectRed, c("1", "2", "3"))
    test_replicate_levels(objectRed, c("R1", "R2", "R3"))
})
# Selecting two conditions: chromosomes are untouched, and the condition
# levels are dropped.
test_that("reduceHiCDOCDataSet works if select condition, drop levels", {
    expect_warning(
        objectRed <- reduceHiCDOCDataSet(
            exampleHiCDOCDataSetProcessed,
            conditions = c(1, 2),
            dropLevels = TRUE
        ),
        "You should not reduce a HiCDOCDataSet after"
    )
    # Chromosomes
    expect_identical(objectRed@chromosomes, exampleHiCDOCDataSetProcessed@chromosomes)
    expect_identical(objectRed@totalBins, exampleHiCDOCDataSetProcessed@totalBins)
    expect_identical(objectRed@weakBins, exampleHiCDOCDataSetProcessed@weakBins)
    expect_identical(objectRed$replicate, c("R2", "R1", "R1", "R2", "R3"))
    expect_identical(objectRed$condition, c("2", "1", "2", "1", "1"))
    # Interactions
    expect_equal(dim(SummarizedExperiment::assay(objectRed)), c(39524, 5))
    expect_equal(sum(SummarizedExperiment::assay(objectRed), na.rm=TRUE),
        115.80, tolerance=1e-2)
    # Objects produced by detectCompartments
    test_chromosome_levels(objectRed, c("X", "Y", "Z"))
    test_condition_levels(objectRed, c("1", "2"))
    test_replicate_levels(objectRed, c("R1", "R2", "R3"))
})
# Selecting one replicate: condition levels are kept (each condition still
# exists), replicate levels are dropped.
test_that("reduceHiCDOCDataSet works if select replicate, drop levels", {
    expect_warning(
        objectRed <- reduceHiCDOCDataSet(
            exampleHiCDOCDataSetProcessed,
            replicate = "R1",
            dropLevels = TRUE
        ),
        "You should not reduce a HiCDOCDataSet after"
    )
    # Chromosomes
    expect_identical(objectRed@chromosomes, exampleHiCDOCDataSetProcessed@chromosomes)
    expect_identical(objectRed@totalBins, exampleHiCDOCDataSetProcessed@totalBins)
    expect_identical(objectRed@weakBins, exampleHiCDOCDataSetProcessed@weakBins)
    expect_identical(objectRed$replicate, c("R1", "R1", "R1"))
    expect_identical(objectRed$condition, c("1", "2", "3"))
    # Interactions
    expect_equal(dim(SummarizedExperiment::assay(objectRed)), c(39524, 3))
    expect_equal(sum(SummarizedExperiment::assay(objectRed), na.rm=TRUE),
        48.62, tolerance=1e-2)
    # Objects produced by detectCompartments
    test_chromosome_levels(objectRed, c("X", "Y", "Z"))
    test_condition_levels(objectRed, c("1", "2", "3"))
    test_replicate_levels(objectRed, c("R1"))
})
# Combined selection on all three axes with dropLevels = FALSE: all
# original factor levels survive in the detectCompartments slots.
test_that("reduceHiCDOCDataSet works if select chr, cond & rep, keep levels", {
    expect_warning(
        objectRed <- reduceHiCDOCDataSet(
            exampleHiCDOCDataSetProcessed,
            chromosome = "X",
            replicate = "R1",
            condition = "1",
            dropLevels = FALSE
        ),
        "You should not reduce a HiCDOCDataSet after"
    )
    # Chromosomes
    expect_identical(objectRed@chromosomes, "X")
    expect_identical(objectRed@totalBins, c("X" = 120))
    expect_identical(objectRed@weakBins, list("X" = c(171, 200)))
    expect_identical(objectRed$replicate, "R1")
    expect_identical(objectRed$condition, "1")
    # Interactions
    expect_equal(dim(SummarizedExperiment::assay(objectRed)), c(7021, 1))
    expect_equal(sum(SummarizedExperiment::assay(objectRed), na.rm=TRUE),
        -12.19, tolerance=1e-2)
    # Objects produced by detectCompartments
    test_chromosome_levels(objectRed, c("X", "Y", "Z"))
    test_condition_levels(objectRed, c("1", "2", "3"))
    test_replicate_levels(objectRed, c("R1", "R2", "R3"))
})
<file_sep>#' @description
#' Normalizes the distance effect on the interactions of a given chromosome.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}} restricted to a single chromosome; the
#' chromosome name is read from its metadata.
#'
#' @return
#' A matrix of normalized interaction values (log scale), or NULL if the
#' chromosome is empty.
#'
#' @keywords internal
#' @noRd
.normalizeDistanceEffectOfChromosome <- function(object) {
    # The object is a single-chromosome subset: its name is taken from
    # the first row of the metadata.
    chromosomeName <- as.character(
        SummarizedExperiment::mcols(object)$chromosome[1]
    )
    message(
        "Chromosome ",
        chromosomeName,
        ": normalizing distance effect."
    )
    currentAssay <- SummarizedExperiment::assay(object)
    # Reordering columns in alphabetic order (useful for tests)
    validAssay <- object@validAssay[[chromosomeName]]
    refOrder <- paste(object$condition, object$replicate)
    values <- currentAssay[, validAssay, drop = FALSE]
    values <- values[, order(refOrder[validAssay]), drop = FALSE]
    # Genomic distance between the two anchors of each interaction.
    distances <- InteractionSet::pairdist(object, type = "mid")
    chromosomeValues <- data.table(
        "distance" = rep(distances, length(validAssay)),
        "value" = as.vector(values)
    )
    chromosomeValues <- chromosomeValues[!is.na(value), ]
    setorder(chromosomeValues, distance, value)
    # Fit the loess on a random sample, capped at loessSampleSize, to keep
    # the regression tractable.
    idSample <- sample(
        seq_len(nrow(chromosomeValues)),
        size = min(
            object@parameters$loessSampleSize,
            nrow(chromosomeValues)
        )
    )
    sample <- chromosomeValues[idSample]
    setorder(sample, distance)
    if (nrow(sample) == 0) {
        message("Chromosome ", chromosomeName, " is empty.")
        return(NULL)
    }
    # Pick the loess span minimizing the chosen criterion (AICc or GCV)
    # over the given span interval.
    optimizeSpan <- function(
        model,
        criterion = c("aicc", "gcv"),
        spans = c(0.01, 0.9)
    ) {
        criterion <- match.arg(criterion)
        result <- stats::optimize(
            function(span) {
                model <- stats::update(model, span = span)
                span <- model$pars$span
                trace <- model$trace.hat
                sigma2 <- sum(model$residuals ^ 2) / (model$n - 1)
                if (criterion == "aicc") {
                    quality <- (
                        log(sigma2) + 1 + 2 * (2 * (trace + 1)) /
                            (model$n - trace - 2)
                    )
                } else if (criterion == "gcv") {
                    quality <- model$n * sigma2 / (model$n - trace) ^ 2
                }
                return(quality)
            },
            spans
        )
        return(result$minimum)
    }
    # Exact trace computation is only affordable for small samples.
    traceMethod <- "approximate"
    if (object@parameters$loessSampleSize <= 1000) {
        traceMethod <- "exact"
    }
    # The fit is done in log-log space; +1 avoids log(0) at distance 0.
    sample[,logvalue := log(value)]
    sample[,logdistance := log(distance + 1)]
    loess <- stats::loess(
        logvalue ~ logdistance,
        data = sample,
        control = stats::loess.control(trace.hat = traceMethod)
    )
    span <- optimizeSpan(loess, criterion = "gcv")
    # Refit with the optimized span.
    loess <- stats::loess(
        logvalue ~ logdistance,
        span = span,
        data = sample,
        control = stats::loess.control(trace.hat = traceMethod)
    )
    sample[, bias := stats::predict(loess)]
    sample[, value := NULL]
    sample[, logvalue := NULL]
    sample[, logdistance := NULL]
    data.table::setnames(sample, "distance", "sampleDistance")
    sample <- unique(sample)
    # Map each observed distance to the nearest sampled distance, so every
    # interaction gets a predicted bias.
    uniqueDistances <- unique(sort(chromosomeValues$distance))
    sampleDistance <- unique(sort(sample$sampleDistance))
    sampleDistance <- vapply(
        uniqueDistances,
        function(distance) {
            sampleDistance[which.min(abs(distance - sampleDistance))]
        },
        FUN.VALUE = 0
    )
    valueMap <- data.table(
        "distance" = uniqueDistances,
        "sampleDistance" = sampleDistance
    )
    valueMap <- data.table::merge.data.table(
        valueMap,
        sample,
        by = "sampleDistance"
    )
    # Sort sample distances and bias in the same order as the data
    loessDistances <- data.table::merge.data.table(
        data.table("distance" = distances),
        valueMap,
        by = "distance",
        sort = FALSE,
        all.x = TRUE
    )
    # Subtract the expected (distance-driven) log value from the log assay.
    currentAssay <- log(currentAssay) - loessDistances$bias
    return(currentAssay)
}
#' @title
#' Normalize distance effect.
#'
#' @description
#' Normalizes interactions by their "expected" value relative to the distance
#' that separates their positions. The "expected" values are estimated with a
#' loess regression on the proportion of interactions for each distance.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#' @param loessSampleSize
#' The number of positions used as a sample to estimate the effect of distance
#' on proportion of interactions. Defaults to
#' \code{object$loessSampleSize} which is originally set to
#' \code{defaultHiCDOCParameters$loessSampleSize} = 20000.
#' @param parallel
#' Should the normalization be run in parallel mode? Default to FALSE.
#'
#' @return
#' A \code{\link{HiCDOCDataSet}} with normalized interactions.
#'
#' @examples
#' data(exampleHiCDOCDataSet)
#' object <- normalizeDistanceEffect(exampleHiCDOCDataSet)
#'
#' @seealso
#' \code{\link{normalizeTechnicalBiases}},
#' \code{\link{normalizeBiologicalBiases}},
#' \code{\link{HiCDOC}}
#'
#' @export
normalizeDistanceEffect <- function(
    object,
    loessSampleSize = NULL,
    parallel = FALSE
) {
    # An explicit sample size overrides the stored parameter.
    if (!is.null(loessSampleSize)) {
        object@parameters$loessSampleSize <- loessSampleSize
    }
    object@parameters <- .validateParameters(object@parameters)
    # Normalize each chromosome independently, then stack the per-chromosome
    # matrices back into a single assay.
    perChromosome <- S4Vectors::split(
        object,
        SummarizedExperiment::mcols(object)$chromosome,
        drop = FALSE
    )
    normalizedParts <- .internalLapply(
        parallel,
        perChromosome,
        FUN = .normalizeDistanceEffectOfChromosome
    )
    SummarizedExperiment::assay(object) <- do.call("rbind", normalizedParts)
    return(object)
}
<file_sep>#' Interactions plot using facet_grid
#'
#' @param dataplot
#' a data.table of interactions to plot
#' @param xylim
#' a length 2 vector : limits of the matrix
#' @param transform
#' character : transformation to be passed to \code{scale_fill_gradient2}
#' @param colours
#' vector of colors of length 3 to be passed to \code{scale_fill_gradient2}
#' @param midpoint
#' midpoint value to be passed to \code{scale_fill_gradient2}
#' @param chromosomeName
#' Character, name of the chromosome
#' @return
#' a ggplot object
#'
#' @keywords internal
#' @noRd
.plotInteractionsGrid <- function(
    dataplot,
    xylim,
    transform,
    colours,
    midpoint,
    chromosomeName
) {
    # Heatmap of the interaction matrix, one facet per condition/replicate.
    result <- ggplot(
        data = dataplot,
        aes(x = start1, y = start2, z = interaction)
    )
    # Upper triangle: tiles at their original coordinates.
    result <- result + geom_tile(aes(fill = interaction), na.rm = TRUE)
    # Lower triangle: mirror the off-diagonal tiles.
    result <- result + geom_tile(
        data = dataplot[start1 != start2, ],
        aes(x = start2, y = start1, fill = interaction),
        na.rm = TRUE
    )
    result <- result + coord_fixed(ratio = 1)
    result <- result + theme_bw()
    result <- result + xlim(xylim)
    result <- result + scale_y_reverse(limits = rev(xylim))
    result <- result + facet_grid(condition ~ replicate, drop = FALSE)
    result <- result + labs(
        title = paste0("Chromosome ", chromosomeName),
        x = "",
        y = ""
    )
    result <- result + scale_fill_gradient2(
        low = colours[1],
        mid = colours[2],
        high = colours[3],
        midpoint = midpoint,
        trans = transform,
        name = "Intensity",
        na.value = "transparent"
    )
    return(result)
}
#' Interactions plot using facet_wrap
#'
#' @param dataplot
#' a data.table of interactions to plot
#' @param xylim
#' a length 2 vector : limits of the matrix
#' @param transform
#' character : transformation to be passed to \code{scale_fill_gradient2}
#' @param colours
#' vector of colors of length 3 to be passed to \code{scale_fill_gradient2}
#' @param midpoint
#' midpoint value to be passed to \code{scale_fill_gradient2}
#' @param chromosomeName
#' Character, name of the chromosome
#' @param totalRows
#' Integer, number of rows in facet_wrap
#' @param totalCols
#' Integer, number of colums in facet_wrap
#'
#' @return
#' a ggplot object
#'
#' @keywords internal
#' @noRd
.plotInteractionsWrap <- function(
    dataplot,
    xylim,
    transform,
    colours,
    midpoint,
    chromosomeName,
    totalRows,
    totalCols
) {
    # Heatmap of the interaction matrix: the upper triangle is drawn at the
    # original coordinates, and off-diagonal tiles are mirrored into the
    # lower triangle. One facet per condition/replicate combination, laid
    # out in a totalRows x totalCols wrap.
    plot <- ggplot(
        data = dataplot,
        aes(x = start1, y = start2, z = interaction)
    ) + geom_tile(
        aes(fill = interaction),
        na.rm = TRUE
    ) + geom_tile(
        data = dataplot[start1 != start2, ],
        aes(x = start2, y = start1, fill = interaction),
        na.rm = TRUE
    ) + coord_fixed(ratio = 1) + theme_bw() + xlim(xylim) + scale_y_reverse(
        limits = rev(xylim)
    ) + facet_wrap(
        . ~ variable,
        ncol = totalCols,
        nrow = totalRows,
        drop = FALSE
    ) + labs(
        # paste0, not paste: paste inserted an extra space ("Chromosome  X"),
        # inconsistent with the title produced by .plotInteractionsGrid.
        title = paste0("Chromosome ", chromosomeName),
        x = "",
        y = ""
    ) + scale_fill_gradient2(
        low = colours[1],
        mid = colours[2],
        high = colours[3],
        midpoint = midpoint,
        trans = transform,
        name = "Intensity",
        na.value = "transparent"
    )
    return(plot)
}
#' @title
#' Plot interaction matrices.
#'
#' @description
#' Plots the interaction matrices as heatmaps.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#' @param chromosome
#' A chromosome name or index in \code{chromosomes(object)}.
#' @param transform
#' Transformation of the color scale. Default to NULL (no transformation). See
#' \code{\link[ggplot2]{scale_fill_gradient2}} for other accepted values.
#' @param colours
#' A character vector colours of length 3 to use for the gradient. See
#' \code{\link[ggplot2]{scale_fill_gradient2}} for more info. Defaults to
#' \code{c("low" = "#2c7bb6", "mid" = "#ffffbf", "high" = "#d7191c")}.
#' @param midpoint
#' midpoint value to be passed to \code{scale_fill_gradient2}.
#' Default to 0.
#'
#' @return
#' A \code{ggplot}.
#'
#' @examples
#' data(exampleHiCDOCDataSet)
#' plotInteractions(exampleHiCDOCDataSet, chromosome = 1)
#'
#' @export
plotInteractions <- function(
    object,
    chromosome,
    transform = NULL,
    colours = c("low" = "#2c7bb6", "mid" = "#ffffbf", "high" = "#d7191c"),
    midpoint = 0
) {
    chromosomeName <- .validateNames(object, chromosome, "chromosomes")
    # NULL means no transformation of the colour scale.
    if (is.null(transform)) {
        transform <- "identity"
    }
    # Extract the assay and interaction coordinates of the chromosome.
    rowsChromosome <- (S4Vectors::mcols(object)$chromosome == chromosomeName)
    assayChromosome <- SummarizedExperiment::assay(object[rowsChromosome, ])
    assayChromosome <- data.table::as.data.table(assayChromosome)
    # One column per condition/replicate pair, e.g. "1_R1".
    data.table::setnames(
        assayChromosome,
        paste(object$condition, object$replicate, sep = "_")
    )
    interactionsChromosome <- InteractionSet::interactions(
        object[rowsChromosome, ]
    )
    interactionsChromosome <- as.data.table(interactionsChromosome)
    dataplot <- base::cbind(
        interactionsChromosome[, .(
            seqnames = seqnames1,
            start1,
            start2
        )],
        assayChromosome
    )
    # Long format: one row per (position pair, condition/replicate) value.
    dataplot <- data.table::melt.data.table(
        dataplot,
        id.vars = c("seqnames", "start1", "start2"),
        value.name = "interaction",
        variable.factor = FALSE
    )
    dataplot <- dataplot[!is.na(interaction)]
    if (nrow(dataplot) == 0) {
        message("No interactions for chromosome ", chromosomeName, ".")
        return(NULL)
    }
    # Split "condition_replicate" back into two factors with stable,
    # sorted levels so the facets keep a deterministic order.
    dataplot[, c("condition", "replicate") := data.table::tstrsplit(
        variable,
        "_",
        fixed = TRUE
    )]
    dataplot[, condition := factor(
        condition,
        levels = sort(unique(object$condition))
    )]
    dataplot[, replicate := factor(
        replicate,
        levels = sort(unique(object$replicate))
    )]
    regionsChromosome <- as.data.table(InteractionSet::regions(object))
    regionsChromosome <- regionsChromosome[seqnames == chromosomeName]
    xylim <- c(min(regionsChromosome$start), max(regionsChromosome$start))
    # Validate the colour gradient specification.
    if (length(colours) != 3) {
        stop("`colours` must be a vector of length 3.")
    } else {
        # An unnamed vector is interpreted positionally as low, mid, high.
        if (is.null(names(colours))) names(colours) <- c("low", "mid", "high")
        if (!identical(sort(names(colours)), c("high", "low", "mid"))) {
            # Fixed: the message was missing the closing quote after 'high'.
            stop("`colours` are supposed to be named 'low', 'mid' and 'high'.")
        }
    }
    # Choose the facet layout: a condition x replicate grid when it can be
    # filled, otherwise a wrapped layout padded with empty facets.
    if (length(unique(object$replicate)) <= max(table(object$condition))) {
        plot <- .plotInteractionsGrid(
            dataplot,
            xylim,
            transform,
            colours,
            midpoint,
            chromosomeName
        )
    } else {
        totalLevels <- table(object$condition)
        totalCols <- max(totalLevels)
        totalRows <- length(unique(object$condition))
        # Pad each condition's replicate list to the same length so the
        # wrapped layout stays rectangular.
        existing <- by(object$replicate, object$condition, unique)
        existing <- lapply(existing, as.character)
        existing <- lapply(existing, .completeLevels, totalCols)
        existing <- mapply(
            paste,
            names(existing),
            existing,
            sep = "_",
            SIMPLIFY = FALSE
        )
        allLevels <- unlist(existing, use.names = FALSE)
        dataplot[, variable := factor(variable, levels = allLevels)]
        plot <- .plotInteractionsWrap(
            dataplot,
            xylim,
            transform,
            colours,
            midpoint,
            chromosomeName,
            totalRows,
            totalCols
        )
    }
    return(plot)
}
<file_sep>data(exampleHiCDOCDataSet)
# With the default sparsity threshold (0.3), exactly one replicate of the
# example dataset is expected to be filtered out.
test_that("filterSparseReplicates behaves as expected", {
    # default filtering on the example dataset
    expect_message(
        object <- filterSparseReplicates(exampleHiCDOCDataSet),
        "Removed 1 replicate in total",
        fixed = TRUE
    )
    # Rows and chromosomes are kept; filtering invalidates assay columns.
    expect_equal(nrow(object), 43480)
    expect_identical(object@chromosomes, c("W", "X", "Y", "Z"))
    expect_identical(object@parameters$sparseReplicateThreshold, 0.3)
    # validAssay lists, per chromosome, the assay columns still usable.
    expect_is(object@validAssay, "list")
    expect_equal(object@validAssay,
                list("W" = c(1, 2, 4, 5, 6, 7),
                     "X" = c(1, 2, 5, 6, 7),
                     "Y" = 1:7,
                     "Z" = c(4,7)))
    # Filtered entries become NA: check the count and sums of the rest.
    assay <- SummarizedExperiment::assay(object)
    expect_equal(sum(!is.na(assay)), 185382)
    expect_equal(colSums(assay, na.rm=TRUE),
                 c(9217842, 23119136 , 7810636, 29607663,
                   17856784, 24182877, 23488484))
})
# With a very strict threshold (99.95% non-zero required), more replicates
# are expected to be filtered out than with the default.
test_that("filterSparseReplicates behaves as expected with custom param", {
    # Filter 1 chromosome on the example dataset
    expect_message(
        object <- filterSparseReplicates(exampleHiCDOCDataSet, 0.9995),
        "Removed 4 replicates in total.",
        fixed = TRUE
    )
    expect_equal(nrow(object), 43480)
    expect_identical(object@chromosomes, c("W", "X", "Y", "Z"))
    # expect_identical(object@weakBins, list("18" = NULL))
    # The custom threshold must be stored back into the parameters.
    expect_identical(object@parameters$sparseReplicateThreshold, 0.9995)
    assay <- SummarizedExperiment::assay(object)
    expect_equal(sum(!is.na(assay)), 153080)
    expect_equal(colSums(assay, na.rm=TRUE),
                 c(5338026, 23119136 , 7810636, 29607663,
                   10799881, 9615769, 23488484))
})
<file_sep>#' @title
#' Check centroid PCA
#'
#' @description
#' Check whether centroids are correctly placed on a PCA.
#'
#' @param chromosomeName
#' A chromosome name or index in \code{chromosomes(object)}.
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#'
#' @return
#' A \code{data.table}, with 3 elements: the name of the chromosome,
#' whether the centroids of each compartment have the same sign
#' only works when the number of conditions is 2), and whether the
#' variance explained by the first axis is greater than a threshold
#' (here, 75%).
#'
#' @examples
#' data(exampleHiCDOCDataSetProcessed)
#' .checkPca("X", exampleHiCDOCDataSetProcessed)
#'
#' @noRd
.checkPca <- function(chromosomeName, object) {
    # Compartment labels present in the centroids table (usually "A"/"B").
    compartmentNames <- as.character(unique(object@centroids$compartment))
    pcaResult <- .computePca(object, chromosomeName)
    projection <- pcaResult$PCA
    explainedVariance <- pcaResult$propvar
    # Both checks fail when the PCA could not be computed.
    centroidCheck <- FALSE
    pc1Check <- FALSE
    if (!is.null(projection)) {
        # A compartment passes when all its centroids fall on the same side
        # of the first principal component.
        sameSide <- function(compartmentName) {
            pc1Signs <- sign(projection[compartment == compartmentName, ]$PC1)
            length(unique(pc1Signs)) == 1
        }
        centroidCheck <- all(vapply(compartmentNames, sameSide, FUN.VALUE = TRUE))
        # The first axis must explain at least the configured proportion
        # of the variance.
        pc1Check <- (explainedVariance[[1]] >= object@parameters$PC1CheckThreshold)
    }
    return(data.table(
        chromosome = chromosomeName,
        centroid.check = centroidCheck,
        PC1.check = pc1Check
    ))
}
#' @title
#' Check compartment assignment
#'
#' @description
#' Check "A" compartments are different from "B" compartments.
#'
#' @param chromosome
#' A chromosome name or index in \code{chromosomes(object)}.
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#'
#' @return
#' A \code{data.table}, with 2 elements: the name of the chromosome,
#' whether the check passed (a Wilcoxon test, with p-value less than 5%).
#'
#' @noRd
.checkCompartmentAssignment <- function(chromosomeName, object) {
    # Restrict both tables to the chromosome under test.
    chromosomeCompartments <- object@compartments[
        chromosome == chromosomeName,
    ]
    chromosomeRatios <- object@selfInteractionRatios[
        chromosome == chromosomeName,
    ]
    # One row per (bin, condition), carrying both the assigned compartment
    # and the self-interaction ratio.
    merged <- data.table::merge.data.table(
        chromosomeCompartments,
        chromosomeRatios,
        by = c("index", "condition")
    )
    # The ratios of "A" bins should differ from those of "B" bins.
    wilcoxonResult <- wilcox.test(ratio ~ compartment, data = merged)
    return(data.table(
        chromosome = chromosomeName,
        assignment.check = (wilcoxonResult$p.value <= 0.05)
    ))
}
#' @title
#' Check whether compartments satisfy sanity checks.
#'
#' @description
#' Check centroid PCA and compartment assignments.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#'
#' @return
#' A \code{data.table}, with 3 elements: the name of the chromosome,
#' whether the centroids of each compartment have the same sign
#' only works when the number of conditions is 2), and whether the
#' variance explained by the first axis is greater than a threshold
#' (here, 75%), and whether the check passed (a Wilcoxon test, with p-value
#' less than 5%).
#'
#' @noRd
.checkResults <- function(object) {
    chromosomeNames <- chromosomes(object)
    # One row of PCA checks per chromosome.
    pcaChecks <- data.table::rbindlist(
        lapply(chromosomeNames, .checkPca, object)
    )
    # One row of assignment checks per chromosome, in the same order.
    assignmentChecks <- data.table::rbindlist(
        lapply(chromosomeNames, .checkCompartmentAssignment, object)
    )
    # Combine both into a single table and store it on the object.
    checks <- pcaChecks
    checks[, assignment.check := assignmentChecks$assignment.check]
    object@checks <- checks
    return(object)
}
<file_sep>#' @description
#' Removes weak positions of a given chromosome.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#' @param chromosomeName
#' The name of a chromosome.
#' @param threshold
#' The minimum average interaction for a position to be kept.
#'
#' @return
#' A list of:
#' - The weak positions.
#' - The filtered interactions.
#'
#' @keywords internal
#' @noRd
.filterWeakPositionsOfChromosome <- function(
    chromosomeName,
    reducedObject,
    threshold
) {
    # Replicates (assay columns) still valid for this chromosome.
    validColumns <- reducedObject@validAssay[[chromosomeName]]
    interactions <- as.data.table(InteractionSet::interactions(reducedObject))
    interactions <- interactions[, .(index1, index2)]
    # All known bins
    minBin <- min(interactions$index1, interactions$index2)
    maxBin <- max(interactions$index1, interactions$index2)
    allBins <- seq(minBin, maxBin)
    # Reducing the values of diagonal by 0.5 factor -> only upper matrix.
    diagonal <- (interactions$index1 == interactions$index2)
    matrixAssay <- SummarizedExperiment::assay(reducedObject)[, validColumns]
    matrixAssay <- matrixAssay * (1 - 0.5 * (diagonal))
    interactions <- base::cbind(
        interactions,
        matrixAssay
    )
    # Long format: one row per (bin pair, replicate).
    interactions <- data.table::melt.data.table(
        interactions,
        id.vars = c("index1", "index2"),
        na.rm = FALSE
    )
    interactions[is.na(value), value := 0]
    totalBins <- reducedObject@totalBins[chromosomeName]
    # Bins absent from every interaction are removed upfront.
    # Bug fix: the original tested index1 twice, so bins appearing only as
    # index2 were wrongly flagged as absent.
    removedBins <- allBins[
        !(allBins %in% unique(c(interactions$index1, interactions$index2)))
    ]
    totalNewWeakBins <- 1
    totalRemovedBins <- 0
    # Recursive removal of bins - deleting a bin can create a new weak bin.
    while (totalNewWeakBins > 0 && totalRemovedBins <= length(allBins)) {
        # Per-bin, per-replicate interaction sums, counting the bin as first
        # or second member of the pair.
        sum1 <- interactions[
            ,
            .(sum1 = sum(value, na.rm = TRUE)),
            by = .(index = index1, variable)
        ]
        sum2 <- interactions[
            ,
            .(sum2 = sum(value, na.rm = TRUE)),
            by = .(index = index2, variable)
        ]
        sum12 <- data.table::merge.data.table(
            sum1,
            sum2,
            by = c("index", "variable"),
            all = TRUE
        )
        sum12[is.na(sum1), sum1 := 0]
        sum12[is.na(sum2), sum2 := 0]
        sum12[, mean := (sum1 + sum2) / totalBins]
        # Bins whose average interaction falls below the threshold in any
        # replicate. Already-removed bins no longer appear in `interactions`,
        # so these are all newly detected weak bins.
        weakBins <- unique(sum12[mean < threshold, index])
        # Bug fix: the original computed length(weakBins) - totalRemovedBins,
        # which under-counts (weakBins only ever holds new bins). That could
        # terminate the recursion one step early and record bins as removed
        # without actually filtering their interactions.
        totalNewWeakBins <- length(weakBins)
        # Remove interactions of weak bins
        if (totalNewWeakBins > 0) {
            removedBins <- c(removedBins, weakBins)
            interactions <- interactions[
                !(index1 %in% weakBins | index2 %in% weakBins)
            ]
            totalRemovedBins <- totalRemovedBins + totalNewWeakBins
        }
    }
    message(
        "Chromosome ",
        chromosomeName,
        ": ",
        length(removedBins),
        " position",
        if (length(removedBins) != 1) "s",
        " removed, ",
        length(allBins) - length(removedBins),
        " position",
        if (length(allBins) - length(removedBins) != 1) "s",
        " remaining."
    )
    return(removedBins)
}
#' @title
#' Filter weak positions.
#'
#' @description
#' Removes weak genomic positions whose interactions average is lower than the
#' threshold.
#'
#' @details
#' Detects weak genomic positions in each replicate, and removes them from all
#' replicates to guarantee comparability across conditions when calling
#' \code{\link{detectCompartments}}.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#' @param threshold
#' The minimum average interaction for a position to be kept. If a position's
#' average interaction with the entire chromosome is lower than this value in
#' any of the replicates, it is removed from all replicates and conditions.
#' Defaults to \code{object$weakPositionThreshold} which is originally set to
#' \code{defaultHiCDOCParameters$weakPositionThreshold} = 1.
#'
#' @return
#' A filtered \code{\link{HiCDOCDataSet}}.
#'
#' @seealso
#' \code{\link{filterSmallChromosomes}},
#' \code{\link{filterSparseReplicates}},
#' \code{\link{HiCDOC}}
#'
#' @examples
#' data(exampleHiCDOCDataSet)
#' object <- exampleHiCDOCDataSet
#'
#' object <- filterWeakPositions(object)
#'
#' @export
filterWeakPositions <- function(object, threshold = NULL) {
    .validateSlots(
        object,
        slots = c(
            "chromosomes",
            "totalBins",
            "parameters"
        )
    )
    # An explicit threshold overrides (and is stored into) the parameters.
    if (!is.null(threshold)) {
        object@parameters$weakPositionThreshold <- threshold
    }
    object@parameters <- .validateParameters(object@parameters)
    threshold <- object@parameters$weakPositionThreshold
    message(
        "Keeping positions with interactions average greater or equal to ",
        threshold,
        "."
    )
    # One sub-object per chromosome, processed independently (with progress
    # bar via pbapply).
    objectChromosomes <- S4Vectors::split(
        object,
        SummarizedExperiment::mcols(object)$chromosome,
        drop = FALSE
    )
    weakBins <- pbapply::pbmapply(
        function(c, m, t) .filterWeakPositionsOfChromosome(c, m, t),
        object@chromosomes,
        objectChromosomes,
        threshold
    )
    object@weakBins <- weakBins
    # Drop every interaction involving a weak bin, in all replicates.
    indices <- as.data.table(InteractionSet::interactions(object))
    toRemove <- (
        indices$index1 %in% unlist(weakBins) |
        indices$index2 %in% unlist(weakBins)
    )
    if (sum(toRemove)>0) {
        object <- object[!toRemove,]
        object <- InteractionSet::reduceRegions(object)
        object@validAssay <- .determineValids(object)
        # Remove empty chromosomes
        leftChromosomes <-
            names(object@validAssay[vapply(object@validAssay,
                                           function(x) length(x)>0,
                                           FUN.VALUE = TRUE)])
        if(!identical(leftChromosomes,object@chromosomes)){
            object <- reduceHiCDOCDataSet(object, chromosomes = leftChromosomes)
        }
    }
    totalWeakBins <- sum(vapply(weakBins, length, FUN.VALUE = 0))
    message(
        "Removed ",
        totalWeakBins,
        " position",
        if (totalWeakBins != 1) "s",
        " in total."
    )
    # All interactions were removed: warn, but still return the object.
    if (length(toRemove) == sum(toRemove)) {
        warning("No data left!", call. = FALSE)
    }
    return(object)
}
<file_sep>library(testthat)
library(HiCDOC)
# Run the full testthat suite of the HiCDOC package.
test_check("HiCDOC")
<file_sep>data(exampleHiCDOCDataSet)
object <- reduceHiCDOCDataSet(exampleHiCDOCDataSet, chromosomes=c("X", "Z"), conditions = c(1, 2))
# End-to-end check of detectCompartments on a reduced dataset (chromosomes
# X and Z, conditions 1 and 2): result slot types, sizes, and seeded values.
test_that("detectCompartments behaves as expected", {
    # Detect Compartments
    set.seed(3215) # Test with 123 : no significant differences
    expect_message(
        object <- detectCompartments(object, parallel = FALSE),
        "Detecting significant differences."
    )
    # Create new objects in correct format
    expect_is(object@distances, "data.table")
    expect_is(object@selfInteractionRatios, "data.table")
    expect_is(object@compartments, "GRanges")
    expect_is(object@concordances, "GRanges")
    expect_is(object@differences, "GRanges")
    expect_is(object@centroids, "data.table")
    # Differences
    expect_equal(length(object@differences), 1)
    expect_equal(
        length(object@differences[object@differences$pvalue.adjusted <= 0.05]),
        0
    )
    expect_is(object@differences@seqnames, "Rle")
    expect_is(object@differences$condition.1, "factor")
    expect_is(object@differences$condition.2, "factor")
    # Centroids
    expect_equal(nrow(object@centroids), 6)
    expect_equal(
        sapply(object@centroids$centroid, length),
        c(rep(120, 4), rep(200, 2))
    )
    expect_equal(
        sapply(object@centroids$centroid, mean),
        c(572.776, 631.262, 514.537, 571.618, 676.45, 803.454),
        tolerance = 1e-04
    )
    expect_is(object@centroids$chromosome, "factor")
    expect_is(object@centroids$condition, "factor")
    expect_is(object@centroids$compartment, "factor")
    expect_is(object@centroids$centroid, "list")
    # Compartments
    expect_equal(length(object@compartments), 440)
    expect_equal(
        length(object@compartments[object@compartments$compartment == "B"]),
        169
    )
    expect_is(object@compartments@seqnames, "Rle")
    expect_is(object@compartments$condition, "factor")
    expect_is(object@compartments$compartment, "factor")
    expect_is(object@compartments$index, "numeric")
    # Concordance
    expect_is(object@concordances@seqnames, "Rle")
    expect_is(object@concordances$index, "numeric")
    expect_is(object@concordances$condition, "factor")
    expect_is(object@concordances$replicate, "factor")
    expect_is(object@concordances$compartment, "factor")
    expect_is(object@concordances$concordance, "numeric")
    expect_equal(length(object@concordances), 880)
    expect_equal(
        length(object@concordances[object@concordances$compartment == "A"]),
        543
    )
    expect_equal(
        100 * mean(object@concordances$concordance),
        0.03121712,
        tolerance = 1e-05
    )
    # Distances
    expect_is(object@distances$chromosome, "factor")
    expect_is(object@distances$index, "numeric")
    expect_is(object@distances$condition, "factor")
    expect_is(object@distances$replicate, "factor")
    expect_is(object@distances$compartment, "factor")
    expect_is(object@distances$distance, "numeric")
    expect_equal(nrow(object@distances), 1760)
    expect_equal(mean(object@distances$distance), 5713.632, tolerance = 1e-04)
    # SelfInteractionRatios
    expect_equal(nrow(object@selfInteractionRatios), 879)
    expect_equal(
        mean(object@selfInteractionRatios$ratio),
        1087.815,
        tolerance = 1e-04
    )
    expect_is(object@selfInteractionRatios$chromosome, "factor")
    expect_is(object@selfInteractionRatios$index, "numeric")
    expect_is(object@selfInteractionRatios$condition, "factor")
    expect_is(object@selfInteractionRatios$replicate, "factor")
    expect_true(is.numeric(object@selfInteractionRatios$ratio))
})
<file_sep>test_that("normalizeTechnicalBiases behaves as expected", {
data(exampleHiCDOCDataSet)
object <- reduceHiCDOCDataSet(exampleHiCDOCDataSet, chromosomes = c("X"))
object <- filterSparseReplicates(object)
object <- filterWeakPositions(object)
# Apply normalization
set.seed(123)
expect_warning(norm <- normalizeTechnicalBiases(object, parallel = FALSE))
# Keep object format
expect_equal(nrow(norm), nrow(object))
assay <- SummarizedExperiment::assay(norm)
expect_equal(sum(!is.na(assay)), 35105)
# Two different possible values,
# 1rst with
# BLAS: openblas/libblas.so.3
# LAPACK: libopenblasp-r0.2.20.so
# 2nd with
# Matrix products: default
# BLAS: atlas/libblas.so.3.10.3
# LAPACK: atlas/liblapack.so.3.10.
expect_equal(any(sapply(list(
c(751294.6, 1152226.8, 0, 0, 721966.6, 721539.2, 910899.8),
c(751294.6, 1152227.4, 0.0, 0.0, 721967.5, 721538.9, 910899.4)),
function(x) identical(x, round(colSums(assay, na.rm=TRUE),1)))), TRUE
)
})
<file_sep>---
title: "HiCDOC"
date: "`r format(Sys.Date(), '%m/%d/%Y')`"
author: "<NAME> & <NAME> & <NAME> & <NAME>"
output:
BiocStyle::html_document:
fig_width: 7
fig_height: 5
toc_float: true
bibliography: library.bib
vignette: >
%\VignetteIndexEntry{HiCDOC}
%\VignetteEngine{knitr::rmarkdown}
%\VignetteEncoding{UTF-8}
%\VignettePackage{HiCDOC}
---
```{r, include = FALSE}
knitr::opts_chunk$set(
collapse = TRUE,
comment = "#>"
)
options(warn=-1)
```
# Introduction
The aim of HiCDOC is to detect significant A/B compartment changes, using Hi-C
data with replicates.
HiCDOC normalizes intrachromosomal Hi-C matrices, uses unsupervised learning to
predict A/B compartments from multiple replicates, and detects significant
compartment changes between experiment conditions.
It provides a collection of functions assembled into a pipeline:
1. [Filter](#filtering-data):
1. Remove chromosomes which are too small to be useful.
2. Filter sparse replicates to remove uninformative replicates with few
interactions.
3. Filter positions (*bins*) which have too few interactions.
2. [Normalize](#normalizing-biases):
1. Normalize technical biases (inter-matrix normalization) using
cyclic loess normalization [@multihiccompare], so that
matrices are comparable.
2. Normalize biological biases (intra-matrix normalization) using
Knight-Ruiz matrix balancing [@kr], so that
all the bins are comparable.
3. Normalize the distance effect, which results from higher interaction
proportions between closer regions, with a MD loess.
3. [Predict](#predicting-compartments-and-differences):
1. Predict compartments using
constrained K-means [@kmeans].
2. Detect significant differences between experiment conditions.
4. [Visualize](#visualizing-data-and-results):
1. Plot the interaction matrices of each replicate.
2. Plot the overall distance effect on the proportion of interactions.
3. Plot the compartments in each chromosome, along with their concordance
(confidence measure) in each replicate, and significant changes between
experiment conditions.
4. Plot the overall distribution of concordance differences.
5. Plot the result of the PCA on the compartments' centroids.
6. Plot the boxplots of self interaction ratios (differences between self
interactions and the medians of other interactions) of each compartment,
which is used for the A/B classification.
# Installation
HiCDOC can be installed from Bioconductor:
```{r, eval=FALSE}
if (!requireNamespace("BiocManager", quietly=TRUE))
install.packages("BiocManager")
BiocManager::install("HiCDOC")
```
The package can then be loaded:
```{r}
library(HiCDOC)
```
# Importing Hi-C data
HiCDOC can import Hi-C data sets in various different formats:
- Tabular `.tsv` files.
- Cooler `.cool` or `.mcool` files.
- Juicer `.hic` files.
- HiC-Pro `.matrix` and `.bed` files.
## Tabular files
A tabular file is a tab-separated multi-replicate sparse matrix with a header:
chromosome position 1 position 2 C1.R1 C1.R2 C2.R1 ...
Y 1500000 7500000 145 184 72 ...
...
The number of interactions between `position 1` and `position 2` of
`chromosome` are reported in each `condition.replicate` column. There is no
limit to the number of conditions and replicates.
To load Hi-C data in this format:
```{r tabFormat, eval = FALSE}
hic.experiment <- HiCDOCDataSetFromTabular('path/to/data.tsv')
```
## Cooler files
To load `.cool` or `.mcool` files generated by [Cooler][cooler-documentation]
[@cooler]:
```{r coolFormat, eval = FALSE}
# Path to each file
paths = c(
'path/to/condition-1.replicate-1.cool',
'path/to/condition-1.replicate-2.cool',
'path/to/condition-2.replicate-1.cool',
'path/to/condition-2.replicate-2.cool',
'path/to/condition-3.replicate-1.cool'
)
# Replicate and condition of each file. Can be names instead of numbers.
replicates <- c(1, 2, 1, 2, 1)
conditions <- c(1, 1, 2, 2, 3)
# Resolution to select in .mcool files
binSize = 500000
# Instantiation of data set
hic.experiment <- HiCDOCDataSetFromCool(
paths,
replicates = replicates,
conditions = conditions,
binSize = binSize # Specified for .mcool files.
)
```
## Juicer files
To load `.hic` files generated by [Juicer][juicer-documentation] [@juicer]:
```{r hicFormat, eval = FALSE}
# Path to each file
paths = c(
'path/to/condition-1.replicate-1.hic',
'path/to/condition-1.replicate-2.hic',
'path/to/condition-2.replicate-1.hic',
'path/to/condition-2.replicate-2.hic',
'path/to/condition-3.replicate-1.hic'
)
# Replicate and condition of each file. Can be names instead of numbers.
replicates <- c(1, 2, 1, 2, 1)
conditions <- c(1, 1, 2, 2, 3)
# Resolution to select
binSize <- 500000
# Instantiation of data set
hic.experiment <- HiCDOCDataSetFromHiC(
paths,
replicates = replicates,
conditions = conditions,
binSize = binSize
)
```
## HiC-Pro files
To load `.matrix` and `.bed` files generated by [HiC-Pro][hicpro-documentation]
[@hicpro]:
```{r hicproFormat, eval = FALSE}
# Path to each matrix file
matrixPaths = c(
'path/to/condition-1.replicate-1.matrix',
'path/to/condition-1.replicate-2.matrix',
'path/to/condition-2.replicate-1.matrix',
'path/to/condition-2.replicate-2.matrix',
'path/to/condition-3.replicate-1.matrix'
)
# Path to each bed file
bedPaths = c(
'path/to/condition-1.replicate-1.bed',
'path/to/condition-1.replicate-2.bed',
'path/to/condition-2.replicate-1.bed',
'path/to/condition-2.replicate-2.bed',
'path/to/condition-3.replicate-1.bed'
)
# Replicate and condition of each file. Can be names instead of numbers.
replicates <- c(1, 2, 1, 2, 1)
conditions <- c(1, 1, 2, 2, 3)
# Instantiation of data set
hic.experiment <- HiCDOCDataSetFromHiCPro(
matrixPaths = matrixPaths,
bedPaths = bedPaths,
replicates = replicates,
conditions = conditions
)
```
# Running the HiCDOC pipeline
An example dataset can be loaded from the HiCDOC package:
```{r reloadExample}
data(exampleHiCDOCDataSet)
```
Once your data is loaded, you can run all the filtering, normalization, and
prediction steps with the command : **`HiCDOC(exampleHiCDOCDataSet)`**.
This one-liner runs all the steps detailed below.
## Filtering data
Remove small chromosomes of length smaller than 100 positions
(100 is the default value):
```{r filterSmallChromosomes}
hic.experiment <- filterSmallChromosomes(exampleHiCDOCDataSet, threshold = 100)
```
Remove sparse replicates filled with less than 30% non-zero interactions
(30% is the default value):
```{r filterSparseReplicates}
hic.experiment <- filterSparseReplicates(hic.experiment, threshold = 0.3)
```
Remove weak positions with less than 1 interaction in average
(1 is the default value):
```{r filterWeakPositions}
hic.experiment <- filterWeakPositions(hic.experiment, threshold = 1)
```
## Normalizing biases
### Technical biases
Normalize technical biases such as sequencing depth (inter-matrix
normalization) so that matrices are comparable :
```{r normalizeTechnicalBiases}
suppressWarnings(hic.experiment <- normalizeTechnicalBiases(hic.experiment))
```
This normalization uses cyclic loess normalization from the multiHiCcompare package [@multihiccompare].
**Note** : For large dataset, it is highly recommended to set a value for
`cycleLoessSpan` parameter to reduce computing time and necessary memory. See
`?HiCDOC::normalizeTechnicalBiases`
### Biological biases
Normalize biological biases, such as GC content, number of restriction sites,
etc. (intra-matrix normalization):
```{r normalizeBiologicalBiases}
hic.experiment <- normalizeBiologicalBiases(hic.experiment)
```
### Distance effect
Normalize the linear distance effect resulting from more interactions between
closer genomic regions (20000 is the default value for `loessSampleSize`):
```{r normalizeDistanceEffect}
hic.experiment <-
normalizeDistanceEffect(hic.experiment, loessSampleSize = 20000)
```
## Predicting compartments and differences
Predict A and B compartments and detect significant differences, here using
the default values as parameters:
```{r detectCompartments}
hic.experiment <- detectCompartments(hic.experiment)
```
# Visualizing data and results
Plot the interaction matrix of each replicate:
```{r plotInteractions}
p <- plotInteractions(hic.experiment, chromosome = "Y")
p
```
Plot the overall distance effect on the proportion of interactions:
```{r plotDistanceEffect}
p <- plotDistanceEffect(hic.experiment)
p
```
List and plot compartments with their concordance (confidence measure) in each
replicate, and significant changes between experiment conditions:
```{r extractCompartments}
compartments(hic.experiment)
```
```{r extractConcordances}
concordances(hic.experiment)
```
```{r extractDifferences}
differences(hic.experiment)
```
```{r plotCompartmentChanges}
p <- plotCompartmentChanges(hic.experiment, chromosome = "Y")
p
```
Plot the overall distribution of concordance differences:
```{r plotConcordanceDifferences}
p <- plotConcordanceDifferences(hic.experiment)
p
```
Plot the result of the PCA on the compartments' centroids:
```{r plotCentroids}
p <- plotCentroids(hic.experiment, chromosome = "Y")
p
```
Plot the boxplots of self interaction ratios (differences between self
interactions and the median of other interactions) of each compartment:
```{r plotSelfInteractionRatios}
p <- plotSelfInteractionRatios(hic.experiment, chromosome = "Y")
p
```
## Sanity checks
Sometimes, basic assumptions on the data are not met.
We try to detect inconsistencies, and warn the user.
### PCA checks
We perform a principal component analysis on the centroids.
Each centroid represent an ideal bin, located either on compartment A or B,
in each sample, and each chromosome.
Given a chromosome, if all the centroids of the A compartment do not have the
same sign on the first principal component, we issue a warning for this
chromosome.
Likewise for the B compartment.
We also check that the inertia on the first principal component is at least
75%.
These checks make sure that centroids of the same compartment do cluster
together.
If the conditions are too different from each other, the centroids may cluster
by condition instead of by compartment; this case is detected by these checks.
### Compartment assignment
We use "self-interaction" in order to classify centroids to A and B
compartments.
The self-interaction of a bin is the ratio of the number of pairs that link this
bin with other bins of the same compartment, divided by the total number of
pairs involving this bin.
The self-interaction distributions should differ between compartments A and B.
We perform a Wilcoxon rank-sum test.
If it is not significant, then a warning is issued.
### Warnings
If at least one of the PCA checks fails, the warnings are printed on the PCA
plot.
If the compartment assignment check fails, the warning is printed on the
corresponding plot.
When accessing the compartments and the concordances, chromosomes which
fail to pass the checks are removed (unless the appropriate parameter is set).
# Session info
```{r sessionInfo}
sessionInfo()
```
# References
[cooler-documentation]: https://cooler.readthedocs.io/en/latest/
[juicer-documentation]: https://github.com/aidenlab/juicer/wiki/Data
[hicpro-documentation]: https://github.com/nservant/HiC-Pro
<file_sep>#' @title
#' Plot concordances.
#'
#' @description
#' Plots the concordances of each replicate in each experiment condition. A
#' concordance can be understood as a confidence in a genomic position's
#' assigned compartment. Mathematically, it is the log ratio of a genomic
#' position's distance to each compartment's centroid, normalized by the
#' distance between both centroids, and min-maxed to a [-1,1] interval.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#' @param chromosome
#' A chromosome name or index in \code{chromosomes(object)}.
#' @param xlim
#' A vector of the minimum and maximum positions to display. If NULL, displays
#' all positions. Defaults to NULL.
#' @param threshold
#' Significance threshold for the compartment changes. Defaults to 0.05.
#' @param points
#' Whether or not to add points to the concordances. Defaults to FALSE.
#'
#' @return
#' A \code{ggplot}.
#'
#' @examples
#' data(exampleHiCDOCDataSetProcessed)
#' plotConcordances(exampleHiCDOCDataSetProcessed, chromosome = 1)
#'
#' @export
plotConcordances <- function(
    object,
    chromosome,
    xlim = NULL,
    threshold = 0.05,
    points = FALSE
) {
    .validateSlots(object, slots = c("concordances", "differences"))
    chromosomeName <- .validateNames(object, chromosome, "chromosomes")
    xlim <- .validateXlim(xlim, object, chromosomeName)
    # Concordances of the requested chromosome, restricted to the x range.
    concordances <- object@concordances[
        GenomeInfoDb::seqnames(object@concordances) == chromosomeName
    ]
    concordances <- data.table::as.data.table(concordances)
    # Prefix so facet strip labels read "Concordances\n<condition>".
    concordances[, condition := paste0("Concordances\n", condition)]
    concordances <- concordances[start >= xlim[1] & start <= xlim[2]]
    # Most frequent range width is taken as the bin size.
    binSize <- .modeVector(concordances$width)
    if (nrow(concordances) == 0) {
        message("No concordances for chromosome ", chromosomeName, ".")
        return(NULL)
    }
    # Significant differences
    differences <- object@differences[
        GenomeInfoDb::seqnames(object@differences) == chromosomeName
    ]
    differences <- as.data.table(differences)
    differences <- differences[pvalue.adjusted <= threshold]
    differences <- differences[start >= xlim[1] & start <= xlim[2]]
    # Duplicate each difference for both of its conditions so that each
    # condition facet gets its own grey rectangle.
    differences <- data.table::melt(
        differences,
        id.vars = c("seqnames", "start", "end"),
        measure.vars = c("condition.1", "condition.2"),
        value.name = "condition"
    )
    differences[, condition := paste0("Concordances\n", condition)]
    caption <- "The grey areas are significant changes"
    if (nrow(differences) == 0) {
        caption <- "No change is significant"
    }
    caption <- paste0(
        caption,
        " (adjusted p-value <= ",
        round(100 * threshold, 2),
        "%)"
    )
    # Common y range across facets, from the observed concordances.
    ylim <- c(
        min(concordances$concordance, na.rm = TRUE),
        max(concordances$concordance, na.rm = TRUE)
    )
    plot <- ggplot()
    # Grey background rectangles marking significant changes.
    if (nrow(differences) > 0) {
        plot <- plot + geom_rect(
            data = differences,
            aes(
                xmin = start,
                xmax = end,
                ymin = ylim[1],
                ymax = ylim[2]
            ),
            color = NA,
            fill = "gray80"
        )
    }
    # Concordance lines, centered on the middle of each bin.
    plot <- plot + geom_line(
        data = concordances,
        aes(
            x = start + 0.5 * binSize,
            y = concordance,
            color = replicate
        )
    )
    if (points) {
        plot <- plot + geom_point(
            data = concordances,
            aes(
                x = start + 0.5 * binSize,
                y = concordance,
                color = replicate
            )
        )
    }
    # One horizontal facet per condition, zero line for reference.
    plot <- (
        plot +
        labs(caption = caption) +
        xlab("position") +
        xlim(xlim[1], xlim[2] + binSize) +
        ylim(ylim) +
        geom_hline(yintercept = 0.0, linewidth = 0.1) +
        facet_grid(rows = vars(condition), margins = FALSE, switch = "y") +
        labs(title = paste0("Concordances of chromosome ",
                            chromosomeName, " by condition")) +
        theme_minimal() +
        theme(axis.title.x = element_text(size = 10)) +
        theme(
            axis.title.y = element_blank(),
            axis.line.y = element_blank(),
            axis.ticks = element_blank(),
            legend.position = "bottom",
            legend.title = element_text(size = 8),
            legend.text = element_text(size = 8),
            strip.placement = "outside"
        )
    )
    return(plot)
}
<file_sep>#' @description
#' Reduces a \code{\link{HiCDOCDataSet}} by keeping only given chromosomes.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#' @param chromosomeNames
#' The names of chromosomes to keep.
#' @param dropLevels
#' Whether or not to also remove unused factor levels after filtering. Should
#' be set to FALSE if the reduced objects are meant to be re-combined later.
#' Defaults to TRUE.
#'
#' @return
#' A reduced \code{\link{HiCDOCDataSet}}.
#'
#' @keywords internal
#' @noRd
.reduceHiCDOCChromosomes <- function(object, chromosomeNames, dropLevels) {
    # Keep the per-chromosome bookkeeping slots in sync with the selection.
    chromosomeIds <- which(object@chromosomes %in% chromosomeNames)
    object@chromosomes <- object@chromosomes[chromosomeIds]
    object@weakBins <- object@weakBins[chromosomeIds]
    object@totalBins <- object@totalBins[chromosomeIds]
    object@validAssay <- object@validAssay[chromosomeIds]
    # Subset the interactions to the kept chromosomes.
    toKeep <- S4Vectors::`%in%`(
        S4Vectors::mcols(object)$chromosome,
        chromosomeNames
    )
    object <- object[toKeep, ]
    if (dropLevels) {
        # Drop unused chromosome levels and prune regions/seqlevels; skipped
        # when the reduced objects are meant to be re-combined later.
        SummarizedExperiment::mcols(object)$chromosome <- droplevels(
            SummarizedExperiment::mcols(object)$chromosome
        )
        object <- InteractionSet::reduceRegions(object)
        GenomeInfoDb::seqlevels(
            InteractionSet::regions(object),
            pruning.mode = "coarse"
        ) <- object@chromosomes
    }
    # Filter every result slot (either a data.table with a `chromosome`
    # column or a GRanges) to the kept chromosomes.
    for (slotName in c(
        "distances",
        "selfInteractionRatios",
        "compartments",
        "concordances",
        "differences",
        "centroids",
        "comparisons"
    )) {
        if (!is.null(slot(object, slotName))) {
            tmp <- slot(object, slotName)
            if (is(tmp, "data.table")) {
                tmp <- tmp[chromosome %in% chromosomeNames]
                if (dropLevels) {
                    tmp[, chromosome := droplevels(chromosome)]
                }
            } else {
                if (!is(tmp, "GRanges")) {
                    stop("malformed HiCDOCDataSet")
                }
                if (dropLevels) {
                    # Pruning seqlevels also drops the ranges themselves.
                    GenomeInfoDb::seqlevels(
                        tmp,
                        pruning.mode = "coarse"
                    ) <- object@chromosomes
                } else {
                    tmp <- tmp[S4Vectors::`%in%`(tmp@seqnames, chromosomeNames)]
                }
            }
            slot(object, slotName) <- tmp
        }
    }
    return(object)
}
#' @description
#' Reduces a \code{\link{HiCDOCDataSet}} by keeping only given conditions.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#' @param conditionNames
#' The names of conditions to keep.
#' @param dropLevels
#' Whether or not to also remove unused factor levels after filtering. Should
#' be set to FALSE if the reduced objects are meant to be re-combined later.
#' Defaults to TRUE.
#'
#' @return
#' A reduced \code{\link{HiCDOCDataSet}}.
#'
#' @keywords internal
#' @noRd
.reduceHiCDOCConditions <- function(object, conditionNames, dropLevels) {
    # Keep only the assay columns belonging to the requested conditions,
    # then recompute which columns remain valid per chromosome.
    conditionIds <- which(object$condition %in% conditionNames)
    object <- object[, conditionIds]
    object@validAssay <- .determineValids(object)
    # Filter every condition-aware result slot (data.table or GRanges).
    for (slotName in c(
        "distances",
        "selfInteractionRatios",
        "compartments",
        "concordances",
        "centroids"
    )) {
        if (!is.null(slot(object, slotName))) {
            tmp <- slot(object, slotName)
            if (is(tmp, "data.table")) {
                tmp <- tmp[condition %in% conditionNames]
                if (dropLevels) {
                    tmp[, condition := droplevels(condition)]
                }
            } else {
                if (!is(tmp, "GRanges")) {
                    stop("malformed HiCDOCDataSet")
                }
                tmp <- tmp[tmp$condition %in% conditionNames]
                if (dropLevels) {
                    tmp$condition <- droplevels(tmp$condition)
                }
            }
            slot(object, slotName) <- tmp
        }
    }
    return(object)
}
#' @description
#' Reduces a \code{\link{HiCDOCDataSet}} by keeping only given replicates.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#' @param replicateNames
#' The names of replicates to keep.
#' @param dropLevels
#' Whether or not to also remove unused factor levels after filtering. Should
#' be set to FALSE if the reduced objects are meant to be re-combined later.
#' Defaults to TRUE.
#'
#' @return
#' A reduced \code{\link{HiCDOCDataSet}}.
#'
#' @keywords internal
#' @noRd
.reduceHiCDOCReplicates <- function(object, replicateNames, dropLevels) {
    # Columns (samples) whose replicate is kept.
    replicateIds <- which(object$replicate %in% replicateNames)
    object <- object[, replicateIds]
    # Valid assays must be recomputed since samples were removed.
    object@validAssay <- .determineValids(object)
    # Filter each optional result slot carrying a `replicate` column; each is
    # either a data.table or a GRanges.
    for (slotName in c(
        "distances",
        "selfInteractionRatios",
        "concordances"
    )) {
        if (!is.null(slot(object, slotName))) {
            tmp <- slot(object, slotName)
            if (is(tmp, "data.table")) {
                tmp <- tmp[replicate %in% replicateNames]
                if (dropLevels) {
                    # `:=` modifies the data.table in place.
                    tmp[, replicate := droplevels(replicate)]
                }
            } else {
                if (!is(tmp, "GRanges")) {
                    stop("malformed HiCDOCDataSet")
                }
                tmp <- tmp[tmp$replicate %in% replicateNames]
                if (dropLevels) {
                    tmp$replicate <- droplevels(tmp$replicate)
                }
            }
            slot(object, slotName) <- tmp
        }
    }
    return(object)
}
#' @title
#' Reduce a \code{\link{HiCDOCDataSet}}.
#'
#' @description
#' Reduces a \code{\link{HiCDOCDataSet}} by keeping only given chromosomes,
#' conditions, or replicates.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#' @param chromosomes
#' The chromosome names or indices in \code{chromosomes(object)} to keep.
#' Defaults to NULL.
#' @param conditions
#' The condition names in \code{sampleConditions(object)} to keep.
#' Defaults to NULL.
#' @param replicates
#' The replicate names in \code{sampleReplicates(object)} to keep.
#' Defaults to NULL.
#' @param dropLevels
#' Whether or not to also remove unused factor levels after filtering. Should
#' be set to FALSE if the reduced objects are meant to be re-combined later.
#' Defaults to TRUE.
#'
#' @return
#' A reduced \code{\link{HiCDOCDataSet}}.
#'
#' @examples
#' data(exampleHiCDOCDataSet)
#' reduced <- reduceHiCDOCDataSet(exampleHiCDOCDataSet, chromosomes = c(1, 2))
#'
#' @export
reduceHiCDOCDataSet <- function(
    object,
    chromosomes = NULL,
    conditions = NULL,
    replicates = NULL,
    dropLevels = TRUE
) {
    # Reducing after detectCompartments() would keep results computed from
    # data that is no longer present, hence the warning.
    if (!is.null(object@differences)) {
        warning(
            "You should not reduce a HiCDOCDataSet after calling ",
            "'detectCompartments()'. All chromosomes, conditions, and ",
            "replicates have been used in the computations.",
            call. = FALSE
        )
    }
    if (!is.null(chromosomes)) {
        chromosomeNames <- .validateNames(object, chromosomes, "chromosomes")
        object <- .reduceHiCDOCChromosomes(object, chromosomeNames, dropLevels)
    }
    if (!is.null(conditions)) {
        conditionNames <- .validateNames(object, conditions, "conditions")
        # Bug fix: pass the validated names (indices translated to names),
        # not the raw user input.
        object <- .reduceHiCDOCConditions(object, conditionNames, dropLevels)
    }
    if (!is.null(replicates)) {
        replicateNames <- .validateNames(object, replicates, "replicates")
        # Bug fix: same as above, use the validated replicate names.
        object <- .reduceHiCDOCReplicates(object, replicateNames, dropLevels)
    }
    return(object)
}
<file_sep>test_that("plotSelfInteractionRatios returns an error if no compartments", {
data(exampleHiCDOCDataSet)
expect_error(
pp <- plotSelfInteractionRatios(exampleHiCDOCDataSet),
"No compartments found."
)
})
# On a processed data set: missing or unknown chromosome must error, and a
# valid call must return a ggplot with the expected labels and layers
# (jitter points, then a boxplot).
test_that("plotSelfInteractionRatios behaves as expected", {
    data(exampleHiCDOCDataSetProcessed)
    # `chromosome` has no default: calling without it must fail.
    expect_error(
        plotSelfInteractionRatios(exampleHiCDOCDataSetProcessed),
        '"chromosome"')
    # Chromosome index out of range.
    expect_error(
        plotSelfInteractionRatios(exampleHiCDOCDataSetProcessed, 4),
        "Unknown")
    pp <- plotSelfInteractionRatios(exampleHiCDOCDataSetProcessed, 1)
    expect_is(pp, "ggplot")
    expect_identical(
        pp$labels,
        list(
            "caption" = "Quality control:\nA/B assignment reliability: OK",
            "colour" = "Compartment",
            "x" = "Compartment",
            "y" = "Interaction difference",
            "title" = paste0("Differences between self-interactions ",
                "and other interactions"),
            "subtitle" = "Chromosome X"
        )
    )
    expect_is(pp$layers[[1]]$geom, "GeomPoint")
    expect_is(pp$layers[[2]]$geom, "GeomBoxplot")
    # No error when printed
    expect_error(print(pp), NA)
})
<file_sep>#' @title
#' Plot the distance effect.
#'
#' @description
#' Plots the distance effect on proportion of interactions.
#' Each point is a cell in the interaction matrix, such that
#' the x-axis is the distance with respect to the diagonal,
#' the y-axis is number of counts.
#' Dots are binned.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#' @param chromosome
#' Name (character) or index of the chromosome, if the plot should be
#' restricted to only one chromosome. Default to NULL.
#' @param transformX
#' Transformation of the X axis. Default to "identity". See
#' \code{\link[ggplot2]{scale_x_continuous}} for other accepted values.
#' @param transformY
#' Transformation of the Y axis. Default to "identity". See
#' \code{\link[ggplot2]{scale_y_continuous}} for other accepted values.
#'
#' @return
#' A \code{ggplot}.
#'
#' @examples
#' data(exampleHiCDOCDataSet)
#' plotDistanceEffect(exampleHiCDOCDataSet)
#'
#' @export
plotDistanceEffect <- function(object, chromosome = NULL, transformX="identity", transformY="identity") {
.validateSlots(object, slots = c("interactions"))
if (!is.null(chromosome)) {
if (length(chromosome) > 1) {
warning(
"`chromosome` should be of length 1, taking the first one."
)
chromosome < chromosome[1]
}
chromosomeName <- .validateNames(object, chromosome, "chromosomes")
rowsId <- as.logical(
S4Vectors::mcols(object)$chromosome == chromosomeName
)
addTitle <- paste(", chromosome", chromosomeName)
} else {
rowsId <- rep(TRUE, length(object))
addTitle <- ""
}
distances <- InteractionSet::pairdist(object, type = "mid")[rowsId]
matrixAssay <- SummarizedExperiment::assay(object)[rowsId, ]
dfDistance <- data.table::data.table(
"distance" = rep(distances, ncol(matrixAssay)),
"interaction" = as.vector(matrixAssay)
)
dfDistance <- dfDistance[!is.na(interaction)]
plot <- ggplot(
dfDistance,
aes(x = distance, y = interaction)
) + geom_bin2d() + scale_fill_gradient(
low = "white",
high = "blue",
trans = "log2"
) + geom_point(col = "transparent") +
geom_smooth(col = "red") +
scale_y_continuous(trans=transformY) +
scale_x_continuous(trans=transformX) + theme_bw()
margPlot <- ggplot(dfDistance, aes(x = distance)) +
geom_histogram(fill="transparent", col="black") +
theme_minimal() +
theme(panel.grid.major.x = element_blank(),
axis.title.x = element_blank(),
axis.text.x = element_blank(),
axis.ticks.x = element_blank())
layoutMatrix <- rbind(c(1,1,1,1,1,1,1,3),
c(2,2,2,2,2,2,2,3),
c(2,2,2,2,2,2,2,3),
c(2,2,2,2,2,2,2,3),
c(2,2,2,2,2,2,2,3))
plot <- gridExtra::arrangeGrob(
margPlot,
plot + theme(legend.position = "none"),
cowplot::get_legend(plot),
layout_matrix = layoutMatrix,
padding = unit(0.2, "lines"),
top = paste0("Distance effect", addTitle)
)
return(cowplot::ggdraw(plot))
}
<file_sep>test_that("plotCompartmentChanges returns error if no compartments", {
data(exampleHiCDOCDataSet)
expect_error(
pp <- plotCompartmentChanges(exampleHiCDOCDataSet),
"No compartments found."
)
})
test_that("plotCompartmentChanges behaves as expected", {
data(exampleHiCDOCDataSetProcessed)
expect_error(
plotCompartmentChanges(exampleHiCDOCDataSetProcessed),
"argument \"chromosome\""
)
expect_error(
plotCompartmentChanges(exampleHiCDOCDataSetProcessed, 5),
"Unknown"
)
pp <- plotCompartmentChanges(exampleHiCDOCDataSetProcessed, 1)
expect_is(pp, "ggplot")
expect_is(pp$labels, "list")
expect_equal(length(pp$labels), 0)
expect_is(pp$layers[[1]]$geom, "GeomDrawGrob")
# No error when printed
expect_error(print(pp), NA)
})
<file_sep>// Based on
// https://github.com/aidenlab/straw/blob/master/R/src/straw-R.cpp
/*
The MIT License (MIT)
Copyright (c) 2011-2016 Broad Institute, Aiden Lab
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
/*
Straw: fast C++ implementation of dump. Not as fully featured as the Java
version. Reads the .hic file, finds the appropriate matrix and slice of data,
and outputs as text in sparse upper triangular format.
Currently only supporting matrices.
*/
#include <Rcpp.h>
#include <cstring>
#include <cstdint>
#include <iostream>
#include <fstream>
#include <sstream>
#include <map>
#include <set>
#include <vector>
#include <streambuf>
#include "zlib.h"
using namespace Rcpp;
// this is for creating a stream from a byte array for ease of use
struct membuf : std::streambuf {
    // Exposes the [begin, end) byte range as the get area of a streambuf,
    // so a std::istream can read from an in-memory buffer without copying.
    membuf(char* begin, char* end) {
        this->setg(begin, begin, end);
    }
};
// stores input information
struct hicInfo {
    int64_t master;                          // file offset of the master index (footer)
    std::vector <int> availableResolutions;  // every resolution listed in the header
    int resolution;                          // resolution requested by the caller
    int selectedResolutionId;                // index in availableResolutions, -1 if absent
    int32_t version;                         // .hic format version
    CharacterVector chromosomes;             // chromosome names, in header order
    std::vector <long> chromosomeLengths;    // chromosome lengths, in header order
    int32_t totalChromosomes;                // number of chromosomes in the header
    bool firstChromosomeIsAll;               // chromosome 0 is the "ALL" pseudo-chromosome
    std::streampos pos;                      // manually tracked read position (see setFilePos)
};
// stores output information: one contact record per index, in parallel vectors
struct outputStr {
    std::vector<int> chromosome;  // chromosome id of each record
    std::vector<int> bin1;        // first bin index
    std::vector<int> bin2;        // second bin index
    std::vector<int> count;       // interaction count (float counts are truncated)
};
// Reads a single byte from the stream and advances the tracked position.
char readCharFromFile(std::istream &fin, hicInfo &info) {
    char value;
    fin.read(&value, sizeof(char));
    info.pos += sizeof(char);
    return value;
}
// Reads a little-endian 16-bit integer and advances the tracked position.
int16_t readInt16FromFile(std::istream &fin, hicInfo &info) {
    int16_t value;
    fin.read(reinterpret_cast<char *>(&value), sizeof(int16_t));
    info.pos += sizeof(int16_t);
    return value;
}
// Reads a little-endian 32-bit integer and advances the tracked position.
int32_t readInt32FromFile(std::istream &fin, hicInfo &info) {
    int32_t value;
    fin.read(reinterpret_cast<char *>(&value), sizeof(int32_t));
    info.pos += sizeof(int32_t);
    return value;
}
// Reads a little-endian 64-bit integer and advances the tracked position.
int64_t readInt64FromFile(std::istream &fin, hicInfo &info) {
    int64_t value;
    fin.read(reinterpret_cast<char *>(&value), sizeof(int64_t));
    info.pos += sizeof(int64_t);
    return value;
}
// Reads a 32-bit float and advances the tracked position.
float readFloatFromFile(std::istream &fin, hicInfo &info) {
    float value;
    fin.read(reinterpret_cast<char *>(&value), sizeof(float));
    info.pos += sizeof(float);
    return value;
}
// Reads a 64-bit double and advances the tracked position.
double readDoubleFromFile(std::istream &fin, hicInfo &info) {
    double value;
    fin.read(reinterpret_cast<char *>(&value), sizeof(double));
    info.pos += sizeof(double);
    return value;
}
// Reads a NUL-terminated string into s and advances the tracked position by
// the number of bytes consumed (including the terminating NUL).
void readString(std::istream &fin, std::string &s, hicInfo &info) {
    std::getline(fin, s, '\0');
    info.pos += fin.gcount();
}
// Reads `size` raw bytes into `buffer` and advances the tracked position.
void readBytes(std::istream &fin, char *buffer, int size, hicInfo &info) {
    fin.read(buffer, size);
    info.pos += size;
}
// Seeks to an absolute position and resets the tracked position.
// The position is tracked manually because, on Windows, read and tellg
// are not kept in sync.
void setFilePos(std::istream &fin, std::streampos pos, hicInfo &info) {
    fin.seekg(pos, std::ios::beg);
    info.pos = pos;
}
// returns whether or not this is valid HiC file
bool readMagicString(std::istream &fin, hicInfo &info) {
    std::string str;
    readString(fin, str, info);
    // Guard against short or empty reads: indexing past the end of a
    // too-short string would be undefined behavior.
    return str.size() >= 3 && str[0] == 'H' && str[1] == 'I' && str[2] == 'C';
}
// Parses the .hic header: magic string, version, master index offset, genome
// id, attribute dictionary, chromosome table, and available resolutions.
// Fills the corresponding fields of info.
void readHeader(std::istream &fin, hicInfo &info) {
    info.selectedResolutionId = -1;
    if (!readMagicString(fin, info)) {
        stop("Hi-C magic string is missing, does not appear to be a hic file.");
    }
    info.version = readInt32FromFile(fin, info);
    if (info.version < 6) {
        stop("Version " + std::to_string(info.version) + " no longer supported.");
    }
    std::string genome;
    info.master = readInt64FromFile(fin, info);
    readString(fin, genome, info);
    if (info.version > 8) {
        readInt64FromFile(fin, info); // nviPosition
        readInt64FromFile(fin, info); // nviLength
    }
    int32_t totalAttributes = readInt32FromFile(fin, info);
    // reading and ignoring attribute-value dictionary
    for (int i = 0; i < totalAttributes; i++) {
        std::string key, value;
        readString(fin, key, info);
        readString(fin, value, info);
    }
    info.totalChromosomes = readInt32FromFile(fin, info);
    // chromosome map for finding matrix
    for (int i = 0; i < info.totalChromosomes; i++) {
        std::string name;
        // Bug fix: lengths are 64-bit in format version 9+; an int32_t local
        // truncated chromosomes longer than ~2 Gb.
        int64_t length;
        readString(fin, name, info);
        if (info.version > 8) {
            length = readInt64FromFile(fin, info);
        } else {
            length = (int64_t) readInt32FromFile(fin, info);
        }
        info.chromosomes.push_back(name);
        // NOTE(review): chromosomeLengths stores `long`, which is 32-bit on
        // some platforms (e.g. Windows) and may still narrow — confirm.
        info.chromosomeLengths.push_back(length);
    }
    int32_t totalResolutions = readInt32FromFile(fin, info);
    for (int i = 0; i < totalResolutions; i++) {
        int32_t resolution = readInt32FromFile(fin, info);
        info.availableResolutions.push_back(resolution);
        if (resolution == info.resolution) {
            info.selectedResolutionId = i;
        }
    }
    // Some files include a whole-genome "ALL" pseudo-chromosome first.
    info.firstChromosomeIsAll = (
        info.chromosomes[0] == "ALL" || info.chromosomes[0] == "All"
    );
}
// This is the meat of reading the data. Takes in the block number and returns
// the set of contact records corresponding to that block. The block data is
// compressed and must be decompressed using the zlib library functions.
void readBlock(
    std::istream &fin,
    int64_t position,
    int32_t size,
    int32_t chromosomeId,
    hicInfo &info,
    outputStr &output
) {
    if (size == 0) {
        return;
    }
    std::vector<int> chromosomeIds, bins1, bins2, counts;
    char* compressedBytes = new char[size];
    char* uncompressedBytes = new char[size*10]; // biggest seen so far is 3
    // Jump to the block and read the compressed payload; the caller restores
    // the previous position afterwards (see readMatrix).
    setFilePos(fin, position, info);
    readBytes(fin, compressedBytes, size, info);
    // Decompress the block
    // zlib struct
    z_stream infstream;
    infstream.zalloc = Z_NULL;
    infstream.zfree = Z_NULL;
    infstream.opaque = Z_NULL;
    infstream.avail_in = (uInt)(size); // size of input
    infstream.next_in = (Bytef *) compressedBytes; // input char array
    infstream.avail_out = (uInt)size*10; // size of output
    infstream.next_out = (Bytef *)uncompressedBytes; // output char array
    // the actual decompression work
    inflateInit(&infstream);
    inflate(&infstream, Z_NO_FLUSH);
    inflateEnd(&infstream);
    int uncompressedSize = infstream.total_out;
    // create stream from buffer for ease of use
    // (info.pos tracking is meaningless while reading this in-memory stream,
    // but the caller restores the file position afterwards, so it is harmless)
    membuf sbuf(uncompressedBytes, uncompressedBytes + uncompressedSize);
    std::istream bufferIn(&sbuf);
    int32_t totalRecords = readInt32FromFile(bufferIn, info);
    bins1.reserve(totalRecords);
    bins2.reserve(totalRecords);
    counts.reserve(totalRecords);
    // different versions have different specific formats
    if (info.version < 7) {
        // Plain list of (binX, binY, count) triples.
        for (int i = 0; i < totalRecords; i++) {
            int32_t binX = readInt32FromFile(bufferIn, info);
            int32_t binY = readInt32FromFile(bufferIn, info);
            float c = readFloatFromFile(bufferIn, info);
            bins1.push_back(binX);
            bins2.push_back(binY);
            // NOTE(review): counts are floats in the file but stored in an
            // int vector — fractional counts are truncated; confirm intended.
            counts.push_back(c);
        }
    } else {
        // Bins are stored relative to a per-block offset.
        int32_t binXOffset = readInt32FromFile(bufferIn, info);
        int32_t binYOffset = readInt32FromFile(bufferIn, info);
        bool useShort = readCharFromFile(bufferIn, info) == 0; // yes this is opposite of usual
        bool useShortBinX = true;
        bool useShortBinY = true;
        if (info.version > 8) {
            useShortBinX = readCharFromFile(bufferIn, info) == 0;
            useShortBinY = readCharFromFile(bufferIn, info) == 0;
        }
        char type = readCharFromFile(bufferIn, info);
        if (type == 1) {
            // Type 1: list of rows, each row a list of (column, count) pairs.
            // Four branches for the 16/32-bit encodings of binX and binY.
            if (useShortBinX && useShortBinY) {
                int16_t rowCount = readInt16FromFile(bufferIn, info);
                for (int i = 0; i < rowCount; i++) {
                    int32_t binY = binYOffset + readInt16FromFile(bufferIn, info);
                    int16_t colCount = readInt16FromFile(bufferIn, info);
                    for (int j = 0; j < colCount; j++) {
                        int32_t binX = binXOffset + readInt16FromFile(bufferIn, info);
                        float c;
                        if (useShort) {
                            c = readInt16FromFile(bufferIn, info);
                        } else {
                            c = readFloatFromFile(bufferIn, info);
                        }
                        bins1.push_back(binX);
                        bins2.push_back(binY);
                        counts.push_back(c);
                    }
                }
            } else if (useShortBinX && !useShortBinY) {
                int32_t rowCount = readInt32FromFile(bufferIn, info);
                for (int i = 0; i < rowCount; i++) {
                    int32_t binY = binYOffset + readInt32FromFile(bufferIn, info);
                    int16_t colCount = readInt16FromFile(bufferIn, info);
                    for (int j = 0; j < colCount; j++) {
                        int32_t binX = binXOffset + readInt16FromFile(bufferIn, info);
                        float c;
                        if (useShort) {
                            c = readInt16FromFile(bufferIn, info);
                        } else {
                            c = readFloatFromFile(bufferIn, info);
                        }
                        bins1.push_back(binX);
                        bins2.push_back(binY);
                        counts.push_back(c);
                    }
                }
            } else if (!useShortBinX && useShortBinY) {
                int16_t rowCount = readInt16FromFile(bufferIn, info);
                for (int i = 0; i < rowCount; i++) {
                    int32_t binY = binYOffset + readInt16FromFile(bufferIn, info);
                    int32_t colCount = readInt32FromFile(bufferIn, info);
                    for (int j = 0; j < colCount; j++) {
                        int32_t binX = binXOffset + readInt32FromFile(bufferIn, info);
                        float c;
                        if (useShort) {
                            c = readInt16FromFile(bufferIn, info);
                        } else {
                            c = readFloatFromFile(bufferIn, info);
                        }
                        bins1.push_back(binX);
                        bins2.push_back(binY);
                        counts.push_back(c);
                    }
                }
            } else {
                int32_t rowCount = readInt32FromFile(bufferIn, info);
                for (int i = 0; i < rowCount; i++) {
                    int32_t binY = binYOffset + readInt32FromFile(bufferIn, info);
                    int32_t colCount = readInt32FromFile(bufferIn, info);
                    for (int j = 0; j < colCount; j++) {
                        int32_t binX = binXOffset + readInt32FromFile(bufferIn, info);
                        float c;
                        if (useShort) {
                            c = readInt16FromFile(bufferIn, info);
                        } else {
                            c = readFloatFromFile(bufferIn, info);
                        }
                        bins1.push_back(binX);
                        bins2.push_back(binY);
                        counts.push_back(c);
                    }
                }
            }
        } else if (type == 2) {
            // Type 2: dense matrix of width w; missing cells are encoded as
            // -32768 (short) or NaN (float) and skipped.
            int32_t nPts = readInt32FromFile(bufferIn, info);
            int16_t w = readInt16FromFile(bufferIn, info);
            for (int i = 0; i < nPts; i++) {
                int32_t row = i / w;
                int32_t col = i - row * w;
                int32_t bin1 = binXOffset + col;
                int32_t bin2 = binYOffset + row;
                if (useShort) {
                    int16_t c = readInt16FromFile(bufferIn, info);
                    if (c != -32768) {
                        bins1.push_back(bin1);
                        bins2.push_back(bin2);
                        counts.push_back(c);
                    }
                } else {
                    float c = readFloatFromFile(bufferIn, info);
                    if (!std::isnan(c)) {
                        bins1.push_back(bin1);
                        bins2.push_back(bin2);
                        counts.push_back(c);
                    }
                }
            }
        }
    }
    // Append this block's records to the global output.
    chromosomeIds = std::vector<int>(bins1.size(), chromosomeId);
    output.chromosome.insert(
        output.chromosome.end(),
        chromosomeIds.begin(),
        chromosomeIds.end()
    );
    output.bin1.insert(output.bin1.end(), bins1.begin(), bins1.end());
    output.bin2.insert(output.bin2.end(), bins2.begin(), bins2.end());
    output.count.insert(output.count.end(), counts.begin(), counts.end());
    delete[] compressedBytes;
    delete[] uncompressedBytes; // don't forget to delete your heap arrays in C++!
}
// Reads the raw binned contact matrix at specified resolution, setting the
// block bin count and block column count.
void readMatrix(
    std::istream &fin,
    int64_t start,
    hicInfo &info,
    outputStr &output
) {
    std::streampos pos;
    if (start != -1) {
        setFilePos(fin, start, info);
        int32_t chromosomeId1 = readInt32FromFile(fin, info);
        int32_t chromosomeId2 = readInt32FromFile(fin, info);
        int32_t totalResolutions = readInt32FromFile(fin, info);
        // Only intra-chromosomal matrices are read; the whole-genome "ALL"
        // pseudo-chromosome (id 0 when present) is skipped.
        if (chromosomeId1 == chromosomeId2) {
            if ((! info.firstChromosomeIsAll) || (chromosomeId1 != 0)) {
                for (
                    int resolutionId = 0;
                    resolutionId < totalResolutions;
                    ++resolutionId
                ) {
                    // Per-resolution metadata (read and ignored except for the
                    // block index below).
                    std::string unit;
                    readString(fin, unit, info);
                    readInt32FromFile(fin, info); // resIdx
                    readFloatFromFile(fin, info); // sumCounts
                    readFloatFromFile(fin, info); // occupiedCellCount
                    readFloatFromFile(fin, info); // stdDev
                    readFloatFromFile(fin, info); // percent95
                    readInt32FromFile(fin, info); // binSize
                    readInt32FromFile(fin, info); // totalBlockBins
                    readInt32FromFile(fin, info); // totalBlockColumns
                    int32_t totalBlocks = readInt32FromFile(fin, info);
                    for (int i = 0; i < totalBlocks; i++) {
                        readInt32FromFile(fin, info); // blockId
                        int64_t blockPosition = readInt64FromFile(fin, info);
                        int32_t blockSize = readInt32FromFile(fin, info);
                        // Only decompress blocks of the requested resolution;
                        // save and restore the position around the jump.
                        if (resolutionId == info.selectedResolutionId) {
                            pos = info.pos;
                            readBlock(
                                fin, blockPosition, blockSize, chromosomeId1, info, output
                            );
                            setFilePos(fin, pos, info);
                        }
                    }
                }
            }
        }
    }
}
// Reads the footer from the master pointer location. Takes in the chromosomes,
// norm, unit (BP or FRAG) and resolution or binsize, and sets the file position
// of the matrix and the normalization vectors for those chromosomes at the
// given normalization and resolution.
void readFooter(std::istream& fin, hicInfo &info, outputStr &output) {
    std::streampos pos;
    setFilePos(fin, info.master, info);
    // totalBytes is 64-bit from format version 9 onwards.
    if (info.version > 8) {
        readInt64FromFile(fin, info); // totalBytes
    } else {
        readInt32FromFile(fin, info); // totalBytes
    }
    // One master-index entry per chromosome pair: key, offset, size.
    int32_t totalEntries = readInt32FromFile(fin, info);
    for (int i = 0; i < totalEntries; i++) {
        std::string str;
        readString(fin, str, info);
        int64_t fpos = readInt64FromFile(fin, info);
        readInt32FromFile(fin, info); // sizeInBytes
        // readMatrix seeks elsewhere; save and restore the index position.
        pos = info.pos;
        readMatrix(fin, fpos, info, output);
        setFilePos(fin, pos, info);
    }
}
// Entry point exported to R: parses a .hic file at the given resolution and
// returns a data.table-classed DataFrame with columns chromosome (factor),
// position 1, position 2 (base pairs), and interaction.
// [[Rcpp::export]]
DataFrame parseHiCFile(std::string &fname, int resolution) {
    hicInfo info;
    outputStr output;
    std::ifstream fin;
    fin.open(fname, std::fstream::in | std::fstream::binary);
    if (!fin) {
        stop("File " + fname + " cannot be opened for reading.");
    }
    info.resolution = resolution;
    readHeader(fin, info);
    // Fail early with the list of resolutions actually present in the file.
    if (info.selectedResolutionId == -1) {
        Rcerr << "Cannot find resolution " << resolution << ".\n";
        Rcerr << "Available resolutions:\n";
        for (int resolution: info.availableResolutions) {
            Rcerr << "\t" << resolution << "\n";
        }
        stop("Exiting.");
    }
    readFooter(fin, info, output);
    // Transform C++ vectors to R vectors and factors
    IntegerVector chromosomes, bins1, bins2, counts;
    chromosomes = wrap(output.chromosome);
    bins1 = wrap(output.bin1);
    bins2 = wrap(output.bin2);
    counts = wrap(output.count);
    // Drop the "ALL" pseudo-chromosome from the factor levels when present;
    // otherwise shift the 0-based C++ ids to 1-based R factor codes.
    if (info.firstChromosomeIsAll) info.chromosomes.erase(0);
    else {
        // factors start with 1 in R
        chromosomes = chromosomes + 1;
    }
    chromosomes.attr("class") = "factor";
    chromosomes.attr("levels") = info.chromosomes;
    // Bin indices are converted to genomic positions in base pairs.
    DataFrame outputR = DataFrame::create(
        _["chromosome"] = chromosomes,
        _["position 1"] = bins1 * resolution,
        _["position 2"] = bins2 * resolution,
        _["interaction"] = counts
    );
    outputR.attr("class") = Rcpp::CharacterVector::create("data.table", "data.frame");
    return outputR;
}
<file_sep>#' @title
#' Plot centroids.
#'
#' @description
#' Plots the result of the PCA on the compartments' centroids.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#' @param chromosome
#' A chromosome name or index in \code{chromosomes(object)}.
#' @param size
#' Size of each point. Defaults to 2.
#' @param checks
#' Whether or not to add sanity checks messages on centroids. Default to TRUE.
#'
#' @return
#' A \code{ggplot}.
#'
#' @examples
#' data(exampleHiCDOCDataSetProcessed)
#' plotCentroids(exampleHiCDOCDataSetProcessed, chromosome = 1)
#'
#' @export
plotCentroids <- function(object, chromosome, size = 2, checks=TRUE) {
.validateSlots(object, slots = "centroids")
if (length(chromosome) > 1) {
warning(
"`chromosome` should be of length 1, ",
"taking the first one"
)
chromosome < chromosome[1]
}
chromosomeName <- .validateNames(object, chromosome, "chromosomes")
pcaData <- .computePca(object, chromosomeName)
pca <- pcaData$PCA
propvar <- pcaData$propvar
propvar <- paste(round(100 * propvar, 2), "%")
plot <- ggplot(
pca,
aes(
x = PC1,
y = PC2,
color = compartment,
shape = condition
)
) + geom_point(size = size) + labs(
title = paste0("PCA on centroids of chromosome ", chromosomeName),
x = paste("PC1 ", propvar[1]),
y = paste("PC2 ", propvar[2])
)
if(checks){
messages <- .messageCheck(object, chromosomeName)
plot <- plot +
labs(caption = paste("Quality controls:",
messages$PC1, messages$centroids, sep="\n"))
}
return(plot)
}
<file_sep>#' @title
#' Plot boxplots of self interaction ratios.
#'
#' @description
#' Plots the boxplots of self interaction ratios, which are the differences
#' between self interaction and median of other interactions for each genomic
#' position. Since the A compartment is open with more interactions overall, it
#' is assumed that self interaction ratios in compartment A are smaller than in
#' compartment B.
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#' @param chromosome
#' A chromosome name or index in \code{chromosomes(object)}.
#' A \code{\link{HiCDOCDataSet}}.
#' @param checks
#' Logical. Should sanity checks messages be printed on plot ? Default to TRUE.
#'
#' @return
#' A \code{ggplot}.
#'
#' @examples
#' data(exampleHiCDOCDataSetProcessed)
#' plotSelfInteractionRatios(exampleHiCDOCDataSetProcessed, chromosome = 1)
#'
#' @export
plotSelfInteractionRatios <- function(object, chromosome, checks=TRUE) {
.validateSlots(object, slots = c("selfInteractionRatios", "compartments"))
chromosomeName <- .validateNames(object, chromosome, "chromosomes")
compartements <- as.data.table(
object@compartments[
GenomeInfoDb::seqnames(object@compartments) == chromosomeName
]
)
dataplot <- data.table::merge.data.table(
object@selfInteractionRatios[chromosome == chromosomeName],
compartements[, .(
chromosome = seqnames,
condition,
index,
compartment
)],
by = c("chromosome", "condition", "index"),
all.x = TRUE
)
plot <- ggplot(
dataplot,
aes(x = compartment, y = ratio)
) + geom_jitter(
aes(color = compartment),
width = 0.35
) + geom_boxplot(
outlier.colour = NA,
fill = NA,
colour = "grey20"
) + labs(
color = "Compartment",
x = "Compartment",
y = "Interaction difference",
title = paste0(
"Differences between self-interactions ",
"and other interactions"
),
subtitle = paste0("Chromosome ", chromosomeName)
)
if(checks){
messages <- .messageCheck(object, chromosomeName)
plot <- plot + labs(caption=paste0("Quality control:\n",
messages$assignment))
}
return(plot)
}
<file_sep>test_that("plotDistanceEffect behaves as expected", {
data(exampleHiCDOCDataSet)
object <- reduceHiCDOCDataSet(exampleHiCDOCDataSet, chromosomes = "X")
expect_message(pp <- plotDistanceEffect(exampleHiCDOCDataSet))
expect_is(pp, "ggplot")
# No error when printed
expect_error(print(pp), NA)
})
<file_sep># Generated by using Rcpp::compileAttributes() -> do not edit by hand
# Generator token: <PASSWORD>
# Thin auto-generated wrapper around the C++ constrained K-means
# implementation (see src/). Arguments are passed through unchanged.
constrainedClustering <- function(rMatrix, rLinks, maxDelta = 0.0001, maxIterations = 50L, totalRestarts = 20L, totalClusters = 2L) {
    .Call('_HiCDOC_constrainedClustering', PACKAGE = 'HiCDOC', rMatrix, rLinks, maxDelta, maxIterations, totalRestarts, totalClusters)
}
# Thin auto-generated wrapper around the C++ .hic parser (see src/):
# reads `fname` at the given resolution and returns a data.table.
parseHiCFile <- function(fname, resolution) {
    .Call('_HiCDOC_parseHiCFile', PACKAGE = 'HiCDOC', fname, resolution)
}
<file_sep># HiCDOC: Compartments prediction and differential analysis with multiple replicates
HiCDOC normalizes intrachromosomal Hi-C matrices, uses unsupervised learning to
predict A/B compartments from multiple replicates, and detects significant
compartment changes between experiment conditions.
It provides a collection of functions assembled into a pipeline:
1. [Filter](#filtering-data):
1. Remove chromosomes which are too small to be useful.
2. Filter sparse replicates to remove uninformative replicates with few
interactions.
3. Filter positions (*bins*) which have too few interactions.
2. [Normalize](#normalizing-biases):
1. Normalize technical biases using
[cyclic loess normalization][multihiccompare-publication], so that
matrices are comparable.
2. Normalize biological biases using
[Knight-Ruiz matrix balancing][knight-ruiz-publication], so that
all the bins are comparable.
3. Normalize the distance effect, which results from higher interaction
proportions between closer regions, with a MD loess.
3. [Predict](#predicting-compartments-and-differences):
1. Predict compartments using
[constrained K-means][constrained-k-means-publication].
2. Detect significant differences between experiment conditions.
4. [Visualize](#visualizing-data-and-results):
1. Plot the interaction matrices of each replicate.
2. Plot the overall distance effect on the proportion of interactions.
3. Plot the compartments in each chromosome, along with their concordance
(confidence measure) in each replicate, and significant changes between
experiment conditions.
4. Plot the overall distribution of concordance differences.
5. Plot the result of the PCA on the compartments' centroids.
6. Plot the boxplots of self interaction ratios (differences between self
interactions and the medians of other interactions) of each compartment,
which is used for the A/B classification.
# Table of contents
* [Installation](#installation)
* [Quick Start](#quick-start)
* [Usage](#usage)
* [Importing Hi-C data](#importing-hi-c-data)
* [Tabular files](#tabular-files)
* [Cooler files](#cooler-files)
* [Juicer files](#juicer-files)
* [HiC-Pro files](#hic-pro-files)
* [Running the HiCDOC pipeline](#running-the-hicdoc-pipeline)
* [Filtering data](#filtering-data)
* [Normalizing biases](#normalizing-biases)
* [Predicting compartments and differences](#predicting-compartments-and-differences)
* [Visualizing data and results](#visualizing-data-and-results)
* [References](#references)
# Installation
To install, execute the following commands in your console:
```bash
Rscript -e 'install.packages("devtools")'
Rscript -e 'devtools::install_github("mzytnicki/HiCDOC")'
```
After installation, the package can be loaded in R >= 4.0:
```r
library("HiCDOC")
```
# Quick Start
To try out HiCDOC, load the simulated toy data set:
```r
data(exampleHiCDOCDataSet)
hic.experiment <- exampleHiCDOCDataSet
```
Then run the default pipeline on the created object:
```r
hic.experiment <- HiCDOC(hic.experiment)
```
And plot some results:
```r
plotCompartmentChanges(hic.experiment, chromosome = 'Y')
```
# Usage
## Importing Hi-C data
HiCDOC can import Hi-C data sets in various different formats:
- Tabular `.tsv` files.
- Cooler `.cool` or `.mcool` files.
- Juicer `.hic` files.
- HiC-Pro `.matrix` and `.bed` files.
### Tabular files
A tabular file is a tab-separated multi-replicate sparse matrix with a header:
chromosome position 1 position 2 C1.R1 C1.R2 C2.R1 ...
3 1500000 7500000 145 184 72 ...
...
The interaction proportions between `position 1` and `position 2` of
`chromosome` are reported in each `condition.replicate` column. There is no
limit to the number of conditions and replicates.
To load Hi-C data in this format:
```r
hic.experiment <- HiCDOCDataSetFromTabular('path/to/data.tsv')
```
### Cooler files
To load `.cool` or `.mcool` files generated by [Cooler][cooler-documentation]:
```r
# Path to each file
paths = c(
'path/to/condition-1.replicate-1.cool',
'path/to/condition-1.replicate-2.cool',
'path/to/condition-2.replicate-1.cool',
'path/to/condition-2.replicate-2.cool',
'path/to/condition-3.replicate-1.cool'
)
# Replicate and condition of each file. Can be names instead of numbers.
replicates <- c(1, 2, 1, 2, 1)
conditions <- c(1, 1, 2, 2, 3)
# Resolution to select in .mcool files
binSize = 500000
# Instantiation of data set
hic.experiment <- HiCDOCDataSetFromCool(
paths,
replicates = replicates,
conditions = conditions,
binSize = binSize # Specified for .mcool files.
)
```
### Juicer files
To load `.hic` files generated by [Juicer][juicer-documentation]:
```r
# Path to each file
paths = c(
'path/to/condition-1.replicate-1.hic',
'path/to/condition-1.replicate-2.hic',
'path/to/condition-2.replicate-1.hic',
'path/to/condition-2.replicate-2.hic',
'path/to/condition-3.replicate-1.hic'
)
# Replicate and condition of each file. Can be names instead of numbers.
replicates <- c(1, 2, 1, 2, 1)
conditions <- c(1, 1, 2, 2, 3)
# Resolution to select
binSize <- 500000
# Instantiation of data set
hic.experiment <- HiCDOCDataSetFromHiC(
paths,
replicates = replicates,
conditions = conditions,
binSize = binSize
)
```
### HiC-Pro files
To load `.matrix` and `.bed` files generated by [HiC-Pro][hicpro-documentation]:
```r
# Path to each matrix file
matrixPaths = c(
'path/to/condition-1.replicate-1.matrix',
'path/to/condition-1.replicate-2.matrix',
'path/to/condition-2.replicate-1.matrix',
'path/to/condition-2.replicate-2.matrix',
'path/to/condition-3.replicate-1.matrix'
)
# Path to each bed file
bedPaths = c(
'path/to/condition-1.replicate-1.bed',
'path/to/condition-1.replicate-2.bed',
'path/to/condition-2.replicate-1.bed',
'path/to/condition-2.replicate-2.bed',
'path/to/condition-3.replicate-1.bed'
)
# Replicate and condition of each file. Can be names instead of numbers.
replicates <- c(1, 2, 1, 2, 1)
conditions <- c(1, 1, 2, 2, 3)
# Instantiation of data set
hic.experiment <- HiCDOCDataSetFromHiCPro(
matrixPaths = matrixPaths,
bedPaths = bedPaths,
replicates = replicates,
conditions = conditions
)
```
## Running the HiCDOC pipeline
Once your data is loaded, you can run all the filtering, normalization, and
prediction steps with:
```r
hic.experiment <- HiCDOC(hic.experiment)
```
This one-liner runs all the steps detailed below.
### Filtering data
Remove small chromosomes of length smaller than 100 positions:
```r
hic.experiment <- filterSmallChromosomes(hic.experiment, threshold = 100)
```
Remove sparse replicates filled with less than 30% non-zero interactions:
```r
hic.experiment <- filterSparseReplicates(hic.experiment, threshold = 0.3)
```
Remove weak positions with less than 1 interaction in average:
```r
hic.experiment <- filterWeakPositions(hic.experiment, threshold = 1)
```
### Normalizing biases
Normalize technical biases such as sequencing depth:
```r
hic.experiment <- normalizeTechnicalBiases(hic.experiment)
```
Normalize biological biases (such as GC content, number of restriction sites,
etc.):
```r
hic.experiment <- normalizeBiologicalBiases(hic.experiment)
```
Normalize the distance effect resulting from higher interaction proportions
between closer regions:
```r
hic.experiment <- normalizeDistanceEffect(hic.experiment, loessSampleSize = 20000)
```
### Predicting compartments and differences
Predict A and B compartments and detect significant differences:
```r
hic.experiment <- detectCompartments(
hic.experiment,
kMeansDelta = 0.0001,
kMeansIterations = 50,
kMeansRestarts = 20
)
```
## Visualizing data and results
Plot the interaction matrix of each replicate:
```r
plotInteractions(hic.experiment, chromosome = '3')
```
Plot the overall distance effect on the proportion of interactions:
```r
plotDistanceEffect(hic.experiment)
```
List and plot compartments with their concordance (confidence measure) in each
replicate, and significant changes between experiment conditions:
```r
compartments(hic.experiment)
concordances(hic.experiment)
differences(hic.experiment)
plotCompartmentChanges(hic.experiment, chromosome = '3')
```
Plot the overall distribution of concordance differences:
```r
plotConcordanceDifferences(hic.experiment)
```
Plot the result of the PCA on the compartments' centroids:
```r
plotCentroids(hic.experiment, chromosome = '3')
```
Plot the boxplots of self interaction ratios (differences between self
interactions and the median of other interactions) of each compartment:
```r
plotSelfInteractionRatios(hic.experiment, chromosome = '3')
```
# References
<NAME>, <NAME>, <NAME>, multiHiCcompare:
joint normalization and comparative analysis of complex Hi-C experiments,
_Bioinformatics_, 2019, https://doi.org/10.1093/bioinformatics/btz048
<NAME>, <NAME>, A fast algorithm for matrix balancing, _IMA
Journal of Numerical Analysis_, Volume 33, Issue 3, July 2013, Pages 1029–1047,
https://doi.org/10.1093/imanum/drs019
<NAME>, <NAME>, <NAME>, <NAME>, Constrained K-means
Clustering with Background Knowledge, _Proceedings of 18th International
Conference on Machine Learning_, 2001, Pages 577-584,
https://pdfs.semanticscholar.org/0bac/ca0993a3f51649a6bb8dbb093fc8d8481ad4.pdf
[multihiccompare-publication]: https://doi.org/10.1093/bioinformatics/btz048
[knight-ruiz-publication]: https://doi.org/10.1093/imanum/drs019
[constrained-k-means-publication]: https://pdfs.semanticscholar.org/0bac/ca0993a3f51649a6bb8dbb093fc8d8481ad4.pdf
[cooler-documentation]: https://cooler.readthedocs.io/en/latest/
[juicer-documentation]: https://github.com/aidenlab/juicer/wiki/Data
[hicpro-documentation]: https://github.com/nservant/HiC-Pro
<file_sep>test_that("defaultHiCDOCParameters has the expected format", {
expect_is(defaultHiCDOCParameters, "list")
expect_identical(
names(defaultHiCDOCParameters),
c(
"smallChromosomeThreshold",
"sparseReplicateThreshold",
"weakPositionThreshold",
"cyclicLoessSpan",
"loessSampleSize",
"kMeansDelta",
"kMeansIterations",
"kMeansRestarts",
"PC1CheckThreshold"
)
)
})
test_that("defaultHiCDOCParameters has the expected values", {
expect_equal(defaultHiCDOCParameters$smallChromosomeThreshold, 100)
expect_equal(defaultHiCDOCParameters$weakPositionThreshold, 1)
expect_equal(defaultHiCDOCParameters$sparseReplicateThreshold, 0.3)
expect_equal(defaultHiCDOCParameters$cyclicLoessSpan, NA_real_)
expect_equal(defaultHiCDOCParameters$loessSampleSize, 20000)
expect_equal(defaultHiCDOCParameters$kMeansDelta, 1e-04)
expect_equal(defaultHiCDOCParameters$kMeansIterations, 50)
expect_equal(defaultHiCDOCParameters$kMeansRestarts, 20)
expect_equal(defaultHiCDOCParameters$PC1CheckThreshold, 0.75)
})
<file_sep>#' @title
#' Methods to access a \code{\link{HiCDOCDataSet}} components.
#'
#' @name
#' HiCDOCDataSet-methods
#'
#' @description
#' Retrieve information and results from a \code{\link{HiCDOCDataSet}}.
#'
#' @examples
#' # Load an example dataset already processed
#' # (i.e. after the detection of compartments)
#' data(exampleHiCDOCDataSetProcessed)
#'
#' exampleHiCDOCDataSetProcessed
#' chromosomes(exampleHiCDOCDataSetProcessed)
#' sampleConditions(exampleHiCDOCDataSetProcessed)
#' sampleReplicates(exampleHiCDOCDataSetProcessed)
#' compartments(exampleHiCDOCDataSetProcessed)
#' differences(exampleHiCDOCDataSetProcessed)
#' concordances(exampleHiCDOCDataSetProcessed)
#'
#' @return
#' A character vector (for \code{chromosomes}, \code{sampleConditions},
#' \code{sampleReplicates}),
#' or a GRanges object
#' (for \code{compartments}, \code{concordances}, \code{differences}).
NULL
#' @describeIn HiCDOCDataSet-methods
#' Retrieves the vector of chromosome names.
#' @usage
#' NULL
#' @export
setGeneric(
name = "chromosomes",
def = function(object) standardGeneric("chromosomes")
)
#### sampleConditions ####
#' @describeIn HiCDOCDataSet-methods
#' Retrieves the vector of condition names, one for each sample.
#' @usage
#' NULL
#' @export
setGeneric(
name = "sampleConditions",
def = function(object) standardGeneric("sampleConditions")
)
#### sampleReplicates ####
#' @describeIn HiCDOCDataSet-methods
#' Retrieves the vector of replicate names, one for each sample.
#' @usage
#' NULL
#' @export
setGeneric(
name = "sampleReplicates",
def = function(object) standardGeneric("sampleReplicates")
)
#### compartments ####
#' @describeIn HiCDOCDataSet-methods
#' Retrieves a \code{GenomicRange} of the compartment of every position
#' in every condition.
#' @param passChecks logical. Display only the concordances/compartments for
#' the chromosomes passing sanity checks.
#' @usage
#' NULL
#' @export
setGeneric(
name = "compartments",
def = function(object, passChecks = TRUE) standardGeneric("compartments")
)
#### differences ####
#' @describeIn HiCDOCDataSet-methods
#' Retrieves a \code{GenomicRange} of the significant compartment differences
#' between conditions, and their p-values.
#' @usage
#' NULL
#' @param object
#' a HiCDOCDataSet object
#' @param threshold
#' a numeric value between 0 and 1. If no threshold, all the differences will
#' be printed even the non significant ones. Otherwise the differences printed
#' are filtered to show the ones with an adjusted p-value <= \code{threshold}.
#' @export
#' @usage
#' NULL
setGeneric(
name = "differences",
def = function(object, threshold = 0.05) standardGeneric("differences")
)
#### concordances ####
#' @describeIn HiCDOCDataSet-methods
#' Retrieves a \code{GenomicRange} of the concordance (confidence in assigned
#' compartment) of every position in every replicate.
#' @usage
#' NULL
#' @export
setGeneric(
name = "concordances",
def = function(object, passChecks = TRUE) standardGeneric("concordances")
)
#' @title
#' Access the parameters of a \code{\link{HiCDOCDataSet}}.
#
#' @name
#' HiCDOCDataSet-parameters
#'
#' @description
#' Retrieves or sets parameters used for filtering, normalization, and
#' prediciton of compartments.
#'
#' @details
#' A \code{\link{HiCDOCDataSet}}'s parameters are automatically set to default
#' values retrieved from \code{\link{defaultHiCDOCParameters}}. They are
#' accessed by filtering, normalization, and compartment detection functions.
#' If those functions are called with custom arguments, the object's
#' parameters are updated to record the actual parameters used. If the
#' object's parameters are customized before calling the functions, the
#' custom parameters will be used.
#'
#' See
#' \code{\link{filterSmallChromosomes}},
#' \code{\link{filterSparseReplicates}},
#' \code{\link{filterWeakPositions}},
#' \code{\link{normalizeDistanceEffect}}, and
#' \code{\link{detectCompartments}},
#' for details on how these parameters are used.
#' \subsection{All parameters are listed here:}{
#' \describe{
#' \item{\code{smallChromosomeThreshold}}{
#' The minimum length (number of positions) for a chromosome to be
#' kept when filtering with \code{\link{filterSmallChromosomes}}.
#' Defaults to
#' \code{defaultHiCDOCParameters$smallChromosomeThreshold} = 100.
#' }
#' \item{\code{sparseReplicateThreshold}}{
#' The minimum percentage of non-zero interactions for a chromosome
#' replicate to be kept when filtering with
#' \code{\link{filterSparseReplicates}}. If a chromosome replicate's
#' percentage of non-zero interactions is lower than this value, it
#' is removed. Defaults to
#' \code{defaultHiCDOCParameters$smallChromosomeThreshold} = 30%.
#' }
#' \item{\code{weakPositionThreshold}}{
#' The minimum average interaction for a position to be kept when
#' filtering with \code{\link{filterWeakPositions}}. If a position's
#' average interaction with the entire chromosome is lower than this
#' value in any of the replicates, it is removed from all replicates
#' and conditions. Defaults to
#' \code{defaultHiCDOCParameters$smallChromosomeThreshold} = 1.
#' }
#' \item{\code{cyclicLoessSpan}}{
#' The span for cyclic loess normalization used in
#' \code{\link{normalizeTechnicalBiases}}. This value is passed to
#' \code{multiHiCcompare::cyclic_loess}.
#' Defaults to NA indicating that span will be automatically
#' calculated using generalized cross validation.
#' For large dataset, it is highly recommended to set this value
#' to reduce computing time and necessary memory.
#' }
#' \item{\code{loessSampleSize}}{
#' The number of positions used as a sample to estimate the effect
#' of distance on proportion of interactions when normalizing with
#' \code{\link{normalizeDistanceEffect}}. Defaults to
#' \code{defaultHiCDOCParameters$loessSampleSize} = 20000.
#' }
#' \item{\code{kMeansDelta}}{
#' The convergence stop criterion for the clustering when detecting
#' compartments with \code{\link{detectCompartments}}. When the
#' centroids' distances between two iterations is lower than this
#' value, the clustering stops. Defaults to
#' \code{defaultHiCDOCParameters$kMeansDelta} = 0.0001.
#' }
#' \item{\code{kMeansIterations}}{
#' The maximum number of iterations during clustering when detecting
#' compartments with \code{\link{detectCompartments}}. Defaults to
#' \code{defaultHiCDOCParameters$kMeansIterations} = 50.
#' }
#' \item{\code{kMeansRestarts}}{
#' The amount of times the clustering is restarted when detecting
#' compartments with \code{\link{detectCompartments}}. For each
#' restart, the clustering iterates until convergence or reaching
#' the maximum number of iterations. The clustering that minimizes
#' inner-cluster variance is selected. Defaults to
#' \code{defaultHiCDOCParameters$kMeansRestarts} = 20.
#' }
#' \item{\code{PC1CheckThreshold}}{
#' The minimum percentage of variance that should be explained by
#' the first principal component of centroids to pass sanity check.
#' Defaults to
#' \code{defaultHiCDOCParameters$PC1CheckThreshold} = 0.75
#' }
#' }
#' }
#'
#' @param object
#' A \code{\link{HiCDOCDataSet}}.
#'
#' @examples
#' data(exampleHiCDOCDataSet)
#'
#' # Retrieve parameters
#' parameters(exampleHiCDOCDataSet)
#'
#' # Set parameters
#' parameters(exampleHiCDOCDataSet) <- list("smallChromosomeThreshold" = 50)
#' parameters(exampleHiCDOCDataSet) <- list(
#' "weakPositionThreshold" = 10,
#' "kMeansRestarts" = 30
#' )
NULL
#### parameters ####
#' @rdname HiCDOCDataSet-parameters
#' @usage NULL
#' @export
setGeneric(
name = "parameters",
def = function(object) standardGeneric("parameters")
)
#### parameters <- ####
#' @rdname HiCDOCDataSet-parameters
#' @usage NULL
#' @param value a named list containing the names and valued of the
#' parameters to change (see Details).
#' @export
setGeneric(
name = "parameters<-",
def = function(object, value) standardGeneric("parameters<-")
)
<file_sep>test_that("HiCDOCDataSetFromTabular produce correct format", {
path <- system.file(
"extdata",
"liver_18_10M_500000.tsv",
package = "HiCDOC"
)
expect_error(object <- HiCDOCDataSetFromTabular(path), NA)
# Class and slots
expect_is(object, "HiCDOCDataSet")
expect_identical(
slotNames(object),
c("input", "parameters", "chromosomes",
"totalBins", "weakBins", "validAssay",
"compartments", "concordances", "differences", "comparisons",
"distances", "centroids", "selfInteractionRatios", "checks", "interactions",
"colData", "assays", "NAMES", "elementMetadata", "metadata"
)
)
# Class of slots
expect_is(object@input, "character")
expect_is(object@weakBins, "list")
expect_is(object@validAssay, "list")
expect_is(object@chromosomes, "character")
expect_is(object@totalBins, "numeric")
expect_is(object@distances, "NULL")
expect_is(object@selfInteractionRatios, "NULL")
expect_is(object@compartments, "NULL")
expect_is(object@concordances, "NULL")
expect_is(object@differences, "NULL")
expect_is(object@centroids, "NULL")
expect_is(object@parameters, "list")
expect_is(object@checks, "NULL")
expect_is(object, "InteractionSet")
# Interactions
expect_is(SummarizedExperiment::assay(object), "matrix")
expect_is(InteractionSet::regions(object), "GRanges")
expect_is(InteractionSet::interactions(object), "StrictGInteractions")
expect_is(S4Vectors::mcols(object), "DataFrame")
expect_is(SummarizedExperiment::assay(object), "matrix")
expect_equal(is.numeric(SummarizedExperiment::assay(object)), TRUE)
})
test_that("HiCDOCDalinkToMatrixtaSetFromTabular produce correct values", {
path <- system.file(
"extdata",
"liver_18_10M_500000.tsv",
package = "HiCDOC"
)
expect_error(object <- HiCDOCDataSetFromTabular(path), NA)
gi <- InteractionSet::interactions(object)
assays <- SummarizedExperiment::assay(object)
# Interactions
expect_equal(nrow(assays), 210)
expect_equal(mean(gi@anchor1), 7.333333, tolerance = 1e-5)
expect_equal(mean(gi@anchor2), 13.66667, tolerance = 1e-5)
expect_equal(mean(assays), 484.019, tolerance = 1e-5)
# chromosomes
expect_identical(object@chromosomes, "18")
# bins
expect_equal(object@totalBins, c("18" = 20))
# Parameters
expect_identical(object@parameters, defaultHiCDOCParameters)
# Positions
regions <- data.frame(InteractionSet::regions(object))
expect_equal(mean(regions$index), 10.5, tolerance = 1e-5)
expect_equal(mean(regions$start), 4750001)
expect_equal(mean(regions$end), 5250000)
})
<file_sep>#### HiCDOCDataSet class definition ####
#' @title
#' \code{HiCDOCDataSet} S4 class.
#'
#' @name
#' HiCDOCDataSet-class
#'
#' @aliases
#' HiCDOCDataSet
#'
#' @description
#' Data structure for a Hi-C experiment.
#'
#' @details
#' An instance of \code{HiCDOCDataSet} describes a Hi-C experiment with slots
#' for path(s) to input file(s), interactions, pipeline parameters defaulting to
#' \code{defaultHiCDOCParameters}, and computation results. It can be
#' constructed from 4 different types of data:
#' - Tabular files: see \code{\link{HiCDOCDataSetFromTabular}}
#' - (m)Cool files: see \code{\link{HiCDOCDataSetFromCool}}
#' - HiC files: see \code{\link{HiCDOCDataSetFromHiC}}
#' - HiC-Pro matrices and bed files: see \code{\link{HiCDOCDataSetFromHiCPro}}
#' An example \code{HiCDOCDataSet} is also available, see
#' \code{\link{exampleHiCDOCDataSet}}.
#' The \code{HiCDOCDataSet} object can be explored using the appropriate
#' accessors.
#'
#' @details # Accessors
#' The accessors for a HiCDOCDataset object are the following:
#' - \code{\link{chromosomes}} to retrieve the vector of chromosome names.
#' - \code{\link{sampleConditions}} to retrieve the vector of condition names,
#' one for each sample.
#' - \code{\link{sampleReplicates}} to retrieve the vector of replicate names,
#' one for each sample.
#'
#' After the detection of compartments you can use this accessors:
#' - \code{\link{compartments}} returns a GenomicRange of the compartment
#' of every position in every condition.
#' - \code{\link{concordances}} returns a GenomicRange of the significant
#' compartment differences between conditions, and their p-values.
#' - \code{\link{differences}} returns a GenomicRange of the concordance
#' (confidence in assigned compartment) of every position in every replicate.
#'
#'
#' See the \link{HiCDOCDataSet-methods} man page for more details on methods
#' and accessors.
#'
#' @seealso
#' \code{\link{HiCDOC}},
#' \code{\link{exampleHiCDOCDataSet}},
#' \code{\link{HiCDOCDataSetFromTabular}},
#' \code{\link{HiCDOCDataSetFromCool}},
#' \code{\link{HiCDOCDataSetFromHiC}},
#' \code{\link{HiCDOCDataSetFromHiCPro}}
#'
#' @md
#' @export
setClass(
"HiCDOCDataSet",
contains = "InteractionSet",
slots = c(
input = "ANY",
parameters = "ANY",
chromosomes = "ANY",
totalBins = "ANY",
weakBins = "ANY",
validAssay = "ANY",
compartments = "ANY",
concordances = "ANY",
differences = "ANY",
comparisons = "ANY",
distances = "ANY",
centroids = "ANY",
selfInteractionRatios = "ANY",
checks = "ANY"
)
)
#' @rdname HiCDOCDataSet-parameters
#' @name HiCDOCDataSet-parameters
#' @export
defaultHiCDOCParameters <- list(
smallChromosomeThreshold = 100,
sparseReplicateThreshold = 0.3,
weakPositionThreshold = 1,
cyclicLoessSpan = NA_real_,
loessSampleSize = 20000,
kMeansDelta = 0.0001,
kMeansIterations = 50,
kMeansRestarts = 20,
PC1CheckThreshold = 0.75
)
#' @title
#' \code{\link{HiCDOCDataSet}} constructor from a tabular file.
#'
#' @description
#' Constructs a \code{\link{HiCDOCDataSet}} from a tabular file.
#'
#' @details
#' Accepts a tabular file with \code{chromosome}, \code{position 1},
#' \code{position 2}, and multiple replicate columns listing interaction counts.
#' Null interactions do not have to be listed. Values must be separated by
#' tabulations. The header must be
#' \code{chromosome position 1 position 2 x.y x.y x.y ...} with \code{x}
#' replaced by condition names and \code{y} replaced by replicate names.
#'
#' @param path
#' A path to a tabular file.
#' @param sep
#' The separator of the tabular file. Default to tabulation.
#'
#' @return
#' A \code{\link{HiCDOCDataSet}}.
#'
#' @examples
#' path <- system.file("extdata", "liver_18_10M_500000.tsv", package = "HiCDOC")
#' object <- HiCDOCDataSetFromTabular(path, sep = '\t')
#'
#' @usage
#' HiCDOCDataSetFromTabular(path, sep = '\t')
#'
#' @export
HiCDOCDataSetFromTabular <- function(
path = NULL,
sep = "\t"
) {
if (!is.character(path) || length(path) > 1) {
stop("'paths' must be a string of characters.", call. = FALSE)
}
if (!file.exists(path)) {
stop("'", path, "' does not exist.", call. = FALSE)
}
object <- .parseTabular(path, sep = sep)
object <- .fillHiCDOCDataSet(object)
return(invisible(object))
}
#' @title
#' \code{\link{HiCDOCDataSet}} constructor from Cool files.
#'
#' @description
#' Constructs a \code{\link{HiCDOCDataSet}} from a set of \code{.cool} or
#' \code{.mcool} files.
#'
#' @param paths
#' A vector of paths to \code{.cool} or \code{.mcool} files.
#' @param replicates
#' A vector of replicate names repeated along the conditions.
#' @param conditions
#' A vector of condition names repeated along the replicates.
#' @param binSize
#' The resolution (span of each position in number of bases). Optionally
#' provided to select the appropriate resolution in \code{.mcool} files.
#' Defaults to NULL.
#'
#' @return
#' A \code{\link{HiCDOCDataSet}}.
#'
#' @examples
#' \dontrun{
#' # Path to each file
#' paths = c(
#' 'path/to/condition-1.replicate-1.cool',
#' 'path/to/condition-1.replicate-2.cool',
#' 'path/to/condition-2.replicate-1.cool',
#' 'path/to/condition-2.replicate-2.cool',
#' 'path/to/condition-3.replicate-1.cool'
#' )
#'
#' # Replicate and condition of each file. Can be names instead of numbers.
#' replicates <- c(1, 2, 1, 2, 1)
#' conditions <- c(1, 1, 2, 2, 3)
#'
#' # Resolution to select in .mcool files
#' binSize = 500000
#'
#' # Instantiation of data set
#' object <- HiCDOCDataSetFromCool(
#' paths,
#' replicates = replicates,
#' conditions = conditions,
#' binSize = binSize # Specified for .mcool files.
#' )
#' }
#'
#' @export
HiCDOCDataSetFromCool <- function(
paths,
replicates,
conditions,
binSize = NA
) {
if(!requireNamespace('rhdf5')) stop("'rhdf5' package is required. Please install it and retry.")
if (is.factor(paths)) {
paths <- as.vector(paths)
}
if (!is.character(paths)) {
stop("'paths' must be a vector of characters.", call. = FALSE)
}
for (path in paths) {
if (!file.exists(path)) {
stop("'", path, "' does not exist.", call. = FALSE)
}
}
if (is.factor(replicates)) {
conditions <- as.vector(replicates)
}
if (is.null(replicates)) {
stop("'replicates' must be a vector of replicates.", call. = FALSE)
}
if (is.factor(conditions)) {
conditions <- as.vector(conditions)
}
if (is.null(conditions)) {
stop("'conditions' must be a vector of conditions.", call. = FALSE)
}
if (!is.na(binSize) && (!is.numeric(binSize) || length(binSize) != 1)) {
stop("'binSize' must be an integer.", call. = FALSE)
}
object <- new("HiCDOCDataSet")
object@input <- paths
object <- .parseCool(object, binSize, replicates, conditions)
object <- .fillHiCDOCDataSet(object)
return(invisible(object))
}
#' @title
#' \code{\link{HiCDOCDataSet}} constructor from HiC files.
#'
#' @description
#' Constructs a \code{\link{HiCDOCDataSet}} from a set of
#' \code{.hic} files.
#'
#' @param paths
#' A vector of paths to \code{.hic} files.
#' @param replicates
#' A vector of replicate names repeated along the conditions.
#' @param conditions
#' A vector of condition names repeated along the replicates.
#' @param binSize
#' The resolution (span of each position in number of bases) to select within
#' the \code{.hic} files.
#'
#' @return
#' A \code{\link{HiCDOCDataSet}}.
#'
#' @examples
#' \dontrun{
#' #' # Path to each file
#' paths = c(
#' 'path/to/condition-1.replicate-1.hic',
#' 'path/to/condition-1.replicate-2.hic',
#' 'path/to/condition-2.replicate-1.hic',
#' 'path/to/condition-2.replicate-2.hic',
#' 'path/to/condition-3.replicate-1.hic'
#' )
#'
#' # Replicate and condition of each file. Can be names instead of numbers.
#' replicates <- c(1, 2, 1, 2, 1)
#' conditions <- c(1, 1, 2, 2, 3)
#'
#' # Resolution to select
#' binSize <- 500000
#'
#' # Instantiation of data set
#' hic.experiment <- HiCDOCDataSetFromHiC(
#' paths,
#' replicates = replicates,
#' conditions = conditions,
#' binSize = binSize
#' )
#' }
#'
#' @usage
#' HiCDOCDataSetFromHiC(paths, replicates, conditions, binSize)
#'
#' @export
HiCDOCDataSetFromHiC <- function(
paths = NULL,
replicates = NULL,
conditions = NULL,
binSize = NULL
) {
if (is.factor(paths)) {
paths <- as.vector(paths)
}
if (!is.character(paths)) {
stop("'paths' must be a vector of characters.", call. = FALSE)
}
for (path in paths) {
if (!file.exists(path)) {
stop("'", path, "' does not exist.", call. = FALSE)
}
}
if (is.factor(replicates)) {
replicates <- as.vector(replicates)
}
if (is.null(replicates)) {
stop("'replicates' must be a vector of replicates.", call. = FALSE)
}
if (length(replicates) != length(paths)) {
stop("'replicates' should have the same length as 'paths'")
}
if (is.factor(conditions)) {
conditions <- as.vector(conditions)
}
if (is.null(conditions)) {
stop("'conditions' must be a vector of conditions.", call. = FALSE)
}
if (length(conditions) != length(paths)) {
stop("'conditions' should have the same length as 'paths'")
}
if (!is.numeric(binSize) || length(binSize) != 1) {
stop("'binSize' must be an integer.", call. = FALSE)
}
binSize <- as.integer(binSize)
object <- new("HiCDOCDataSet")
object@input <- paths
object <- .parseHiC(object, binSize, replicates, conditions)
object <- .fillHiCDOCDataSet(object)
return(invisible(object))
}
#' @title
#' \code{\link{HiCDOCDataSet}} constructor from HiC-Pro files.
#'
#' @description
#' Constructs a \code{\link{HiCDOCDataSet}} from a set of HiC-Pro generated
#' files.
#'
#' @param matrixPaths
#' A vector of paths to HiC-Pro matrix files.
#' @param bedPaths
#' A vector of paths to HiC-Pro bed files.
#' @param replicates
#' A vector of replicate names repeated along the conditions.
#' @param conditions
#' A vector of condition names repeated along the replicates.
#'
#' @return
#' A \code{\link{HiCDOCDataSet}}.
#'
#' @examples
#' \dontrun{
#' # Path to each matrix file
#' matrixPaths = c(
#' 'path/to/condition-1.replicate-1.matrix',
#' 'path/to/condition-1.replicate-2.matrix',
#' 'path/to/condition-2.replicate-1.matrix',
#' 'path/to/condition-2.replicate-2.matrix',
#' 'path/to/condition-3.replicate-1.matrix'
#' )
#'
#' # Path to each bed file
#' bedPaths = c(
#' 'path/to/condition-1.replicate-1.bed',
#' 'path/to/condition-1.replicate-2.bed',
#' 'path/to/condition-2.replicate-1.bed',
#' 'path/to/condition-2.replicate-2.bed',
#' 'path/to/condition-3.replicate-1.bed'
#' )
#'
#' # Replicate and condition of each file. Can be names instead of numbers.
#' replicates <- c(1, 2, 1, 2, 1)
#' conditions <- c(1, 1, 2, 2, 3)
#'
#' # Instantiation of data set
#' hic.experiment <- HiCDOCDataSetFromHiCPro(
#' matrixPaths = matrixPaths,
#' bedPaths = bedPaths,
#' replicates = replicates,
#' conditions = conditions
#' )
#' }
#'
#' @usage
#' HiCDOCDataSetFromHiCPro(matrixPaths, bedPaths, replicates, conditions)
#'
#' @export
HiCDOCDataSetFromHiCPro <- function(
matrixPaths = NULL,
bedPaths = NULL,
replicates = NULL,
conditions = NULL
) {
if (is.factor(matrixPaths)) {
matrixPaths <- as.vector(matrixPaths)
}
if (!is.character(matrixPaths)) {
stop("'matrixPaths' must be a vector of characters.", call. = FALSE)
}
if (is.factor(bedPaths)) {
bedPaths <- as.vector(bedPaths)
}
if (!is.character(bedPaths)) {
stop("'bedPaths' must be a vector of characters.", call. = FALSE)
}
if (length(matrixPaths) != length(bedPaths)) {
stop(
"'matrixPaths' and 'bedPaths' must have the same length.",
call. = FALSE
)
}
paths <-
base::split(
base::cbind(matrixPaths, bedPaths),
seq(length(matrixPaths))
)
for (path in unlist(paths)) {
if (!file.exists(path)) {
stop("'", path, "' does not exist.", call. = FALSE)
}
}
if (is.factor(replicates)) {
replicates <- as.vector(replicates)
}
if (is.null(replicates)) {
stop("'replicates' must be a vector of replicates.", call. = FALSE)
}
if (is.factor(conditions))
conditions <- as.vector(conditions)
if (is.null(conditions)) {
stop("'conditions' must be a vector of conditions.", call. = FALSE)
}
if (length(conditions) != length(replicates)) {
stop(
"'conditions' and 'replicates' must have the same length",
call. = FALSE
)
}
object <- new("HiCDOCDataSet")
object@input <- paths
object <- .parseHiCPro(object, replicates, conditions)
object <- .fillHiCDOCDataSet(object)
return(invisible(object))
}
#' @title
#' Default pipeline to run on the HiCDOC analysis.
#'
#' @description
#' Runs the default filtering, normalization, and computational steps on a
#' \code{HiCDOCDataSet}. To learn more about HiCDOC, browse the vignette:
#' \code{browseVignettes(package = "HiCDOC")}.
#'
#' @details
#' \subsection{\code{HiCDOC} pipeline}{
#' The HiCDOC pipeline has seven steps:
#' \describe{
#' \item{Three filtering steps:}{
#' \itemize{
#' \item{\code{\link{filterSmallChromosomes}}}{
#' to filter out small chromosomes
#' }
#' \item{\code{\link{filterWeakPositions}}}{
#' to filter out weak positions with very few interactions
#' }
#' \item{\code{\link{filterSparseReplicates}}}{
#' to filter out sparse replicates with many null
#' interactions
#' }
#' }
#' }
#' \item{Three normalization steps:}{
#' \itemize{
#' \item{\code{\link{normalizeTechnicalBiases}}}{
#' to normalize technical biases in each replicates
#' }
#' \item{\code{\link{normalizeBiologicalBiases}}}{
#' to normalize biological biases in each replicate
#' }
#' \item{\code{\link{normalizeDistanceEffect}}}{
#' to normalize the distance effect in each chromosome
#' }
#' }
#' }
#' \item{One computational step:}{
#' \itemize{
#' \item{\code{\link{detectCompartments}}}{
#' to detect compartments in each condition and find
#' significant changes between conditions.
#' }
#' }
#' }
#' }
#' }
#' \subsection{Parallel processing}{
#' The parallel version of HiCDOC uses the
#' \code{\link{BiocParallel}} package. Before to call the
#' function in parallel you should specify the parallel parameters such as:
#' \itemize{
#' \item{On Linux:
#'
#' \code{multiParam <- BiocParallel::MulticoreParam(workers = 10)}
#' }
#' \item{On Windows:
#'
#' \code{multiParam <- BiocParallel::SnowParam(workers = 10)}
#' }
#' }
#' And then you can register the parameters to be used by BiocParallel:
#'
#' \code{BiocParallel::register(multiParam, default = TRUE)}
#'
#' You should be aware that using MulticoreParam, reproducibility of the
#' detectCompartments function using a RNGseed may not work. See this
#' \href{https://github.com/Bioconductor/BiocParallel/issues/122}{issue}
#' for more details.
#' }
#'
#' @param object
#' A \code{HiCDOCDataSet}.
#' @param parallel
#' Whether or not to parallelize each step. Defaults to FALSE.
#'
#' @return
#' A HiCDOCDataSet with all slots filled.
#'
#' @seealso
#' \code{\link{HiCDOCDataSet}}, \code{\link{filterSmallChromosomes}},
#' \code{\link{filterWeakPositions}}, \code{\link{filterSparseReplicates}},
#' \code{\link{normalizeTechnicalBiases}},
#' \code{\link{normalizeBiologicalBiases}},
#' \code{\link{normalizeDistanceEffect}},
#' \code{\link{detectCompartments}}
#'
#' @examples
#' data(exampleHiCDOCDataSet)
#' # Default HiCDOC pipeline
#' # Not printing loess warnings for example purpose.
#' # Results should be inspected if there is any.
#' suppressWarnings(
#' object <- HiCDOC(exampleHiCDOCDataSet)
#' )
#'
#' # Equivalent to
#' if(FALSE){
#' object <- filterSmallChromosomes(exampleHiCDOCDataSet)
#' object <- filterSparseReplicates(object)
#' object <- filterWeakPositions(object)
#' object <- normalizeTechnicalBiases(object)
#' object <- normalizeBiologicalBiases(object)
#' object <- normalizeDistanceEffect(object)
#' object <- detectCompartments(object)
#' }
#'
#' @export
HiCDOC <- function(
object,
parallel = FALSE
) {
object <- filterSmallChromosomes(object)
object <- filterSparseReplicates(object)
object <- filterWeakPositions(object)
object <- normalizeTechnicalBiases(object, parallel = parallel)
object <- normalizeBiologicalBiases(object, parallel = parallel)
object <- normalizeDistanceEffect(object)
object <- detectCompartments(object, parallel = parallel)
return(invisible(object))
}
| 2fccd724774e92162a1a91746b1d9d4d664490c1 | [
"Markdown",
"R",
"C++",
"RMarkdown"
] | 55 | R | mzytnicki/HiCDOC | b5ac243c661323aa7de54a711a617652b7e0dcd4 | a21354fb626eb4b229be75bb87e41c15c21a0f57 |
refs/heads/main | <file_sep>"""
Pathlike Parser parses command-line arguments like SVG paths.
"""
import re
PATTERN_COMMAWSP = r'[ ,\t\n\x09\x0A\x0C\x0D]+'
PATTERN_FLOAT = '[-+]?[0-9]*\.?[0-9]+(?:[eE][-+]?[0-9]+)?'
cmd_parse = [
('COMMAND', r'[a-df-zA-DF-Z]'),
('SKIP', PATTERN_COMMAWSP)
]
cmd_re = re.compile('|'.join('(?P<%s>%s)' % pair for pair in cmd_parse))
num_parse = [
('FLOAT', PATTERN_FLOAT),
('SKIP', PATTERN_COMMAWSP)
]
num_re = re.compile('|'.join('(?P<%s>%s)' % pair for pair in num_parse))
flag_parse = [
('FLAG', r'[0-9]'),
('SKIP', PATTERN_COMMAWSP)
]
flag_re = re.compile('|'.join('(?P<%s>%s)' % pair for pair in flag_parse))
str_parse = [
('QSTR', r'`([^`]*)`'),
('DSTR', r'[^ ,\t\n\x09\x0A\x0C\x0D]+'),
('SKIP', PATTERN_COMMAWSP),
]
str_re = re.compile('|'.join('(?P<%s>%s)' % pair for pair in str_parse))
more_parse = [
('FLOAT', PATTERN_FLOAT),
('BTICK', r'`'),
('SKIP', PATTERN_COMMAWSP)
]
more_re = re.compile('|'.join('(?P<%s>%s)' % pair for pair in more_parse))
def command(parser, letters, *arguments):
def command_dec(func):
parser.add_command(letters, func, arguments)
return func
return command_dec
class Command:
def __init__(self, letter, cmd, arguments):
self.letter = letter
self.command = cmd
self.arguments = arguments
class PathlikeParser:
def __init__(self):
self.commands = dict()
self.pathd = None
self.pos = 0
self.limit = 0
def add_command(self, letters, cmd, arguments):
for letter in letters:
self.commands[letter] = Command(letter, cmd, arguments)
def _command(self):
while self.pos < self.limit:
match = cmd_re.match(self.pathd, self.pos)
if match is None:
return None # Did not match at command sequence.
self.pos = match.end()
kind = match.lastgroup
if kind == 'SKIP':
continue
return match.group()
return None
def _more(self):
while self.pos < self.limit:
match = more_re.match(self.pathd, self.pos)
if match is None:
return False
kind = match.lastgroup
if kind == 'SKIP':
# move skipped elements forward.
self.pos = match.end()
continue
return True
return None
def _number(self):
while self.pos < self.limit:
match = num_re.match(self.pathd, self.pos)
if match is None:
break # No more matches.
kind = match.lastgroup
self.pos = match.end()
if kind == 'SKIP':
continue
return float(match.group())
return None
def _flag(self):
while self.pos < self.limit:
match = flag_re.match(self.pathd, self.pos)
if match is None:
break # No more matches.
kind = match.lastgroup
self.pos = match.end()
if kind == 'SKIP':
continue
return bool(int(match.group()))
return None
def _string(self):
while self.pos < self.limit:
match = str_re.match(self.pathd, self.pos)
if match is None:
break # No more matches.
kind = match.lastgroup
self.pos = match.end()
if kind == 'SKIP':
continue
if kind == 'QSTR':
return str(match.group())[1:-1]
return str(match.group())
return None
def parse(self, pathd):
if isinstance(pathd, (tuple, list)):
pathd = " ".join(pathd)
self.pathd = pathd
self.pos = 0
self.limit = len(pathd)
while True:
cmd = self._command()
if cmd is None:
return
try:
command = self.commands[cmd]
except KeyError:
return
arguments = command.arguments
while True:
args = list()
if arguments is None:
command.command()
if self._more():
raise ValueError
break
if not isinstance(arguments, (tuple,list)):
arguments = tuple(arguments)
for arg in arguments:
if arg is None:
break
if arg is float:
args.append(self._number())
elif arg is int:
args.append(int(self._number()))
elif arg is bool:
args.append(self._flag())
elif arg is str:
args.append(self._string())
else:
args.append(arg(self._string()))
command.command(*args)
if not self._more():
break
<file_sep># pathlike
SVG-Path like parsing for command line and other parsing.
Pathlike does parsing of strings into executable calls like svg-pathd parsing.
Installing:
`pip install pathlike`
See:
https://www.w3.org/TR/SVG/paths.html
The goal of this project is to bring the easy compact path-like parsing to CLI (or other parsing). You may want the same very compact short syntax found in SVG Path Data, for other arbitrary routines.
* All instructions are expressed as one character.
* Superfluous white space and separators (such as commas) may be eliminated; for instance, the following contains unnecessary spaces:
* M 100 100 L 200 200
* It may be expressed more compactly as:
* M100 100L200 200
* A command letter may be eliminated if an identical command letter would otherwise precede it; for instance, the following contains an unnecessary second "L" command:
* M 100 200 L 200 100 L -100 -200
* It may be expressed more compactly as:
* M 100 200 L 200 100 -100 -200
The path data syntax is a prefix notation (i.e., commands followed by parameters). The only allowable decimal point is a Unicode U+0046 FULL STOP (".") character (also referred to in Unicode as PERIOD, dot and decimal point) and no other delimiter characters are allowed. (For example, the following is an invalid numeric value in a path data stream: "13,000.56". Instead, say: "13000.56".)
We are making parsing *like* that SVG-Path parsing, but with arbitrarily defined commands. This could be used to parse SVG Paths, but it's intended to be more broadly functional than that.
* The commands can be any single letter in ascii in upper or lower case that is not the letter 'e' (reserved for floats)
# Types:
* `float` float type is expects a float number.
* `int` int type expects an string integer.
* `bool` bool type expects either a 1 or 0. This is not keyword like True or False it is a flagged 0 or 1. Also, note because of the greedy nature flags can be compounded together without issue if we are expecting `bool, float, str` we can give the pathlike string: `11.2Yellow` and this is bool: `1`, float: `1.2` and str: `Yellow`.
* `str` string types can contain letters, so these cannot take non-backtick quoted strings as multiple commands. The first string can be accepted without COMMA_WS characters, however if the first element of a new cycle is a string it must be backticked so that the we can determine whether this is more data of the original command or a new command.
* other: other types are taken as strings and these are passed into the type given in arguments. The only permitted initialization used on an undefined type is a `str`. We treat the parsing exactly like a string. For example if we have `complex` we can call that with `100+2j` and it will feed that as a string into the `complex('100+2j')`. Undefined types also have the same multi-call limiting factor of strings.
# Examples:
Assuming: `@command(parser, "j", float, float)`
`myscript.py j200 50 30-7qql200.3`
This runs arbitrary command j with (200, 50), j with (30, -7), command q, command q, command l with (200.3)
eg.
```python
@command(parser, "sS", str)
def print_this_string(string):
print(string)
```
Using that:
```
> myscript.py s`Hello World!`
> Hello World!
```
Using that command with multiple operators:
```
> myscript.py s`Hello World!` `Here I am!`
> Hello World!
> Here I am!
```
The first string does not require backticks. Only strings with spaces and additional commands with strings.
```
> myscript.py sHi `Hello World!` `Here I am!`
> Hi!
> Hello World!
> Here I am!
```
If we apply additional data:
```
> myscript.py she sells seashells by the seashore
> he
> ells
> eashells
```
Note that it stops as soon as it reaches the word `by` and because `b` is not a recognized command. Parsing stops. This is consistent with the methods for svg path parsing.
Alternatively if we are not using quoted syntax the remainder of the current COMMAWSP delinated element is used.
myscript.py sHi
This is treated as s("Hi") rather than s, H, i
myscript.py s Hi.
Is likewise treated as s("Hi.") rather than s, H, i
# Example Script
```python
from sys import argv
from pathlike import PathlikeParser, command
parser = PathlikeParser()
@command(parser, "sS", str)
def print_this_string(q):
print(q)
parser.parse(argv[1:])
```
To mark a particular function as linked with some characters, we use decorators. Specifically we define the function with the `@command` annotation.
We denote additional values with additional types:
```python
@command('j', float, float, str)
def funct(a0_float, a1_float, a2_str)
pass
```
We also only support principle types since everything needs to be known. Unknown types are treated
like string commands and we attempt to type cast them.
`@command('f', Color)` expects 1 argument that consists of a Color class. It is assumed that Color can accept the string "Blue"
`myscript.py fBlue`
We can also do multiple values here.
```
myscript.py f`Blue` `Red`
```
We don't however need to treat strings with backticks if the the first argument is a number-type:
For example if we have:
```
@command(parser, "aA", bool, str)
def rev(b, q):
if b:
print(q)
else:
print(q[::-1])
```
```
> myscript.py a 1yellow 0blue 1red 0backwards 1forwards
> yellow
> eulb
> red
> sdrawkcab
> forwards
```
Because the boolean comes before the string, we can determine that more commands are needed for the `a` command. We only need backticks to capture COMMA_WS characters or if the the more parsing is ambiguous. `yellow blue` would return None for the bool, which becomes `False` but the next iteration the first character is `b` which is a new command and is expected to be either `0` or `1`
# Longer Example
```python
from sys import argv
from pathlike import PathlikeParser, command
parser = PathlikeParser()
@command(parser, "zZ")
def close():
print("Closed.")
@command(parser, "mM", float, float)
def cmd(x, y):
print("Moved %f %f" % (x,y))
@command(parser, "lL", float, float)
def cmd(x, y):
print("Line-to %f %f" % (x,y))
@command(parser, "hH", float)
def cmd(x):
print("Horizontal %f" % (x))
@command(parser, "vV", float)
def cmd(y):
print("Vertical %f" % (y))
@command(parser, "tT", float, float)
def cmd(x, y):
print("Smooth-quad %f %f" % (x,y))
@command(parser, "qQ", float, float, float, float)
def cmd(cx, cy, x, y):
print("Quad To %f %f, %f %f" % (cx, cy, x, y))
@command(parser, "sS", float, float, float, float)
def cmd(cx, cy, x, y):
print("Smooth Cubic To %f %f, %f %f" % (cx, cy, x, y))
@command(parser, "cC", float, float, float, float, float, float)
def cmd(cx1, cy1, cx2, cy2, x, y):
print("Cubic To %f %f, %f %f, %f %f" % (cx1, cy1, cx2, cy2, x, y))
@command(parser, "aA", float, float, float, bool, bool, float, float)
def cmd(cx, cy, rot, sweep, large_arc, x, y):
print("Arc cx:%f cy:%f, rot:%f, %d %d, to: %f %f" % (cx, cy, rot, sweep, large_arc, x, y))
args = argv[1:]
parser.parse(args)
```
This parses SVG paths.
These paths are tricky parsing paths from the test files put out by W3C.
```
> myscript.py M200,120 h-25 a25,25 0 1125,25 z
> Moved 200.000000 120.000000
> Horizontal -25.000000
> Arc cx:25.000000 cy:25.000000, rot:0.000000, 1 1, to: 25.000000 25.000000
> Closed.
> myscript.py M280,120 h25 a25,25 0 6 0 -25,25 z"
> Moved 280.000000 120.000000
> Horizontal 25.000000
> Arc cx:25.000000 cy:25.000000, rot:0.000000, 1 0, to: -25.000000 25.000000
> Closed.
```
| 8ab8bea57b52b76735c627a0e8cf370a66370f39 | [
"Markdown",
"Python"
] | 2 | Python | tatarize/pathlike | 8c2b2ff8f77a6a195868616a00a8e049254ed7f4 | b27a70f390903df6a88ca0a3e561fa4c673d7d74 |
refs/heads/main | <repo_name>Martin-Rojas/Web_Development<file_sep>/Project06Clock/script.js
const colorsbg = [
"#4FC3F7",
"#29B6F6",
"#03A9F4",
"#039BE5",
"#0288D1",
"#0277BD",
"#01579B",
"#9575CD",
"#7E57C2",
"#673AB7",
"#5E35B1",
"#512DA8",
"#4527A0",
"#311B92",
"#7986CB",
"#5C6BC0",
"#3F51B5",
"#3949AB",
"#303F9F",
"#283593",
"#1A237E",
"#64B5F6",
"#42A5F5",
"#2196F3",
"#1E88E5",
"#1976D2",
"#1565C0",
"#0D47A1",
];
// Get elements
let hoursEl = document.getElementById("hour");
let minutesEl = document.getElementById("minutes");
let secondsEl = document.getElementById("seconds");
let body = document.querySelector("body");
// Format time
const formatTime = (time) => {
return time < 10 ? `0${time}` : time;
};
const currentDate = new Date();
// Get and display time.
const displayTime = () => {
const currentDate = new Date();
const hours = formatTime(currentDate.getHours());
const minutes = formatTime(currentDate.getMinutes());
const seconds = formatTime(currentDate.getSeconds());
// Display the hour, minutes and seconds in the HTML.
hoursEl.innerText = hours;
minutesEl.innerText = minutes;
secondsEl.innerText = seconds;
};
let index = 0;
function changeBackground() {
body.style.background = colorsbg[index];
}
// Initial call of the functions.
changeBackground();
displayTime();
setInterval(() => {
displayTime();
changeBackground();
if (index > colorsbg.length) {
index = 0;
}
index++;
}, 1000);
| 894fb152c63b9535bbda623c428bef1893cad02e | [
"JavaScript"
] | 1 | JavaScript | Martin-Rojas/Web_Development | b2d0ae34737e35c0996cdbd41afb249b833faf3e | 53b0696dc5b760e4e88e2c2b20d5164452e9a479 |
refs/heads/master | <file_sep>#include<iostream>
#include<vector>
using namespace std;
vector<pair<int, int>> vec;
int c = 0;
bool sp(int i, vector<int> student) {
int flag1 = -1, flag2=-1;
int size = student.size();
for (int j = 0; j < size; j++) {
if (vec[i].first == student[j]) {
flag1 = j;
if (flag2 != -1) {
student.erase(student.begin() + flag1);
student.erase(student.begin() + flag2);
break;
}
}
if (vec[i].second == student[j] ) {
flag2 = j;
if (flag1 != -1) {
student.erase(student.begin() + flag2);
student.erase(student.begin() + flag1);
break;
}
}
}
if (flag1 == -1 || flag2 == -1) {
return false;
}
if (student.empty()) {
c++;
return true;
}
for (int j = i+1; j < vec.size(); j++) {
if (vec[j].first == student.front() || vec[j].second == student.front()) {
sp(j, student);
}
}
if (!student.empty()) {
return false;
}
}
int main(void) {
ios::sync_with_stdio(false);
cin.tie(NULL);
int test_case, n, m, s1, s2;
cin >> test_case;
vector<int> student;
while (test_case--) {
cin >> n >> m;
while (m--) {
cin >> s1 >> s2;
vec.push_back(make_pair(s1, s2));
}
for (int i = 0; i < n; i++) {
student.push_back(i);
}
for (int i = 0; i < vec.size(); i++) {
sp(i, student);
}
vec.clear();
student.clear();
cout << c << '\n';
c = 0;
}
system("PAUSE");
return 0;
}<file_sep>ex) [6.5][이하연] 게임판 덮기.cpp
- cpp 파일만 업로드 해주세요<file_sep>#include<iostream>
#include <string>
using namespace std;
char arr[5][5] = { { 'U','R','L','P','M' },{ 'X','P','R','E','T' },{ 'G','I','A','E','T' },{ 'X','T','N','Z','Y' },{ 'X','O','Q','R','S' } };
int xy[8][2] = { {-1,-1},{-1,0},{-1,1},{0,-1},{0,1},{1,-1},{1,0},{1,1} };
bool flag=false;
void boggle(string word, int x, int y) {
if (word == "") {
flag = true;
}
if (arr[x][y] == word[0]) {
for (int i = 0; i < 8; i++) {
if (x + xy[i][0] >= 0 && y + xy[i][1] >= 0 && x + xy[i][0] <5 && y + xy[i][1] < 5) {
boggle(word.substr(1), x + xy[i][0], y + xy[i][1]);
}
}
}
}
int main(void) {
ios::sync_with_stdio(false);
cin.tie(NULL);
string word = "AQ";
for (int i = 0; i < 5; i++) {
for (int j = 0; j < 5; j++) {
boggle(word, i, j);
}
}
if (flag) {
cout << "YES";
}
else {
cout << "NO";
}
system("PAUSE");
return 0;
}<file_sep>#include<iostream>
#include <string>
#include<vector>
using namespace std;
vector<vector<pair<int, int>>> block = { { {0,0},{0,1},{1,1} }, {{0,0},{0,1},{1,0}},{{0,0},{1,0},{1,1}},{{0,0},{1,0},{1,-1}} };
//ㄱ, Γ, ㄴ, 」
int c=0;
int count_white(vector<string> vec) {
int count = 0;
for (int i = 0; i < vec.size(); i++) {
for (int j = 0; j < vec[i].size(); j++) {
if (vec[i][j] == '.') {
count++;
}
}
}
return count;
}
void cover(vector<string> vec, int x, int y, int count) {
bool flag = true;
//블록이 4가지 종류를 각각 넣어봄
for (int i = 0; i < block.size(); i++) {
flag = true;
//블록을 놓을 수 있는 지 체크
for (int j = 0; j < block[i].size(); j++) {
if (x + block[i][j].first < vec.size() && y + block[i][j].second < vec[x].size()&& x + block[i][j].first>=0 && y + block[i][j].second>=0) {
if (vec[x + block[i][j].first][y + block[i][j].second] == '#') {
flag = false;
break;
}
}
else {
flag = false;
break;
}
}
//놓을 수 있으면 변경
if (flag) {
for (int j = 0; j < block[i].size(); j++) {
vec[x + block[i][j].first][y + block[i][j].second] = '#';
count--;
}
}
if (count == 0) {
c++;
}
//다음 흰색 칸 위치로
for (int k = x; k < vec.size(); k++) {
for (int z = 0; z < vec[k].size(); z++) {
if (vec[k][z] == '.') {
//흰색 위치부터 다시 블록 놓아봄
if (z == y && k == x) {
continue;
}
else if (z < y && k == x) {
continue;
}
cover(vec, k, z, count);
}
}
}
}
}
int main(void) {
ios::sync_with_stdio(false);
cin.tie(NULL);
int test_case, h, w, white_count=0, result=0;
string temp;
vector<string> vec;
cin >> test_case;
while (test_case--) {
cin >> h >> w;
for (int i = 0; i < h; i++) {
cin >> temp;
vec.push_back(temp);
}
white_count = count_white(vec);
if (white_count % 3 != 0) {
c = 0;
}
else {
for (int i = 0; i < vec.size(); i++) {
for (int j = 0; j < vec[i].size(); j++) {
if (vec[i][j] == '.') {
cover(vec, i, j, white_count);
}
}
}
}
}
cout << c << '\n';
c = 0;
system("PAUSE");
return 0;
}<file_sep># 알고리즘 스터디
### 📍 교재
프로그래밍 대회에서 배우는 알고리즘 문제해결 전략
### 🌱 언어
C++
### ✨ Memo
##### 스터디 기간
~ 11월 말
##### 목표
알고리즘 책 및 백준 알고리즘 병행
##### 방식
- 주 1회, 매 주 한 명씩 돌아가며 리딩 진행
- 리딩 방식은 자율
- 리딩 맡은 사람은 Summary 폴더에 내용 정리해서 업로드
- 스터디 모임 전까지 문제 푼 소스코드(.cpp) 업로드 해놓기
##### 참여자 💕
[정윤식](https://github.com/ychic)
[이하연](https://github.com/hayeonmazu)
[박상현](https://github.com/BbakSsang)
[문서희](https://github.com/MunSeoHee)
<file_sep>ex) [문서희] 4_5~6_5.md<file_sep>ex) [6_5][이하연] 게임판 덮기.cpp
- cpp 파일만 업로드 해주세요 | d5add131181a5659d72e9c08c8515308e56fee34 | [
"Markdown",
"C++"
] | 7 | C++ | MunSeoHee/AlgorithmStudy | 8114aa40a6ebc9f1cd69f327aeaef5e2dcca4b81 | 129b6fbc5b87f36ea4bbf24f043e8166fa6616c1 |
refs/heads/master | <file_sep>
// Example
function ourRandomRange(ourMin, ourMax) {
return Math.floor(Math.random() * (ourMax - ourMin + 1)) + ourMin;
}
ourRandomRange(1, 9);
function randomRange(myMin, myMax) {
return Math.floor(Math.random() * (myMax - myMin + 1)) + myMin ; // to generate a number between two given ranges
}
var myRandom = randomRange(1, 3);
console.log("random number between given range: ", myRandom);
// next lesson
var testString = "<NAME> and <NAME> designed the first computer and the software that would have run on it.";
// Example
var expressionToGetSoftware = /software/gi;
var softwareCount = testString.match(expressionToGetSoftware).length; // counts the perticular pattern in given string
console.log("number of software counts: ", softwareCount);
<file_sep>var Bike = function() {
var gear = 2;
this.getGear = function(){
return gear;
};
this.setGear = function(change){
gear = change;
return gear;
};
};
var myBike = new Bike();
console.log(myBike.getGear());
console.log(myBike.setGear(7));
<file_sep>function truncateString(str, num) {
if(num==str.length){
return str;
}
}
console.log(truncateString("A-tisket a-tasket", "A-tisket a-tasket".length));<file_sep>function findLongestWord(str) {
var long;
var newStr = str.split(" ");
var count = newStr.length;
console.log(newStr[2].length);
long = newStr[0];
for(var i=0; i<count-1; i++){
console.log(newStr[i].length," length: ",newStr[i+1].length);
if(newStr[i+1].length>long.length){
long = newStr[i+1];
console.log(long);
}
}
return long;
}
console.log(findLongestWord("The quick brown fox jumped over the lazy dog"));
<file_sep><!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="ie=edge">
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/4.0.0-beta.2/css/bootstrap.min.css" integrity="<KEY>" crossorigin="anonymous">
<title>Document</title>
<style>
main{
padding: 30px;
background-color: red;
margin: 10px 40px 10px 40px;
}
p{
padding: 10px 50px 10px 50px;
}
.head{
padding: 10px 20px 10px 20px;
}
</style>
</head>
<body>
<div class="container main">
<div class="row">
<div class="col-md-12 text-center">
<h1 >Dr. <NAME></h1>
</div>
</div>
<div class="row">
<div class="col-md-12 text-center">
<h3>The man who saved a billion lives</h3>
</div>
</div>
<div class="row text-center" style="background-color: white">
<div class="container">
<img src="https://c2.staticflickr.com/4/3689/10613180113_fdf7bcd316_b.jpg" alt="">
</div>
<div class="container">
<p>Dr. <NAME>, third from left, trains biologists in Mexico on how to increase wheat yields - part of his life-long war on hunger.</p>
</div>
</div>
<div class="row">
<div class="col-md-12">
<h2 class="head text-center">Here's a time line of Dr. Borlaug's life:</h2>
<ul style="padding: 10px 100px 10px 200px">
<li><b>1914</b> - Born in Cresco, Iowa</li>
<li><b>1933</b>- Leaves his family's farm to attend the University of Minnesota, thanks to a Depression era program known as the "National Youth Administration"</li>
<li><b>1935</b>- Has to stop school and save up more money. Works in the Civilian Conservation Corps, helping starving Americans. "I saw how food changed them", he said. "All of this left scars on me."</li>
<li><b>1937</b> - Finishes university and takes a job in the US Forestry Service</li>
<li><b>1938</b> - Marries wife of 69 years <NAME>. Gets laid off due to budget cuts. Inspired by <NAME>, he returns to school study under Stakman, who teaches him about breeding pest-resistent plants.</li>
<li><b>1914</b> - Born in Cresco, Iowa</li>
</ul>
</div>
</div>
</div>
<script src="https://code.jquery.com/jquery-3.2.1.slim.min.js" integrity="<KEY>" crossorigin="anonymous"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/popper.js/1.12.3/umd/popper.min.js" integrity="<KEY>" crossorigin="anonymous"></script>
<script src="https://maxcdn.bootstrapcdn.com/bootstrap/4.0.0-beta.2/js/bootstrap.min.js" integrity="<KEY>" crossorigin="anonymous"></script>
</body>
</html><file_sep>function slasher(arr, howMany) {
var newArr;
if(arr.length>howMany){
newArr = arr.slice(howMany,arr.length);
return newArr;
}
else{
newArr = [];
return newArr;
}
}
console.log(slasher([1, 2, 3], 1));
<file_sep>
//declaring contructor
var Car = function() {
this.wheels = 4;
this.engines = 1;
this.seats = 5;
};
var MotorBike = function() {
this.wheels = 2;
this.engines = 1;
this.seats = 2;
};
// declaring variable
var car = {
"wheels":4,
"engines":1,
"seats":5
};
var motorBike = {
"wheels":2,
"engines":1,
"seats":2
};
console.log("this is first", car);
var myBike = new MotorBike();
myBike.turbo = "twin turbo";
console.log("this is last", myBike);
var yourBike = new MotorBike();
console.log(yourBike);
<file_sep>// checking the variable is empty or not
var a;
if(a || a===0){
console.log('this contains value');
}
else{
console.log('this is empty');
}
//----------------------------------------------------------------------------
<file_sep>var testString = "There are 3 cats but 4 dogs.";
var expression = /\d+/g; // counts the number of digits in the string. \s+ for white spaces. \S for non-white spaces as charecter. \S+ for non-white spaces as word.
var digitCount = testString.match(expression).length;
console.log(digitCount);
<file_sep>var randomNumberBetween0and19 = Math.floor(Math.random() * 20);
function randomWholeNum() {
// Only change code below this line.
var a = 0;
while(a==0){
a = Math.floor(Math.random()* 10);
console.log(a);
}
console.log(a);
}
randomWholeNum();
<file_sep>
function palindrome(str) {
str = str.replace(/[\W_]/g, "");
str = str.toLowerCase();
var newString = str.split("");
str = str.split("");
newString = newString.reverse();
newString = newString.join();
if(newString==str){
return true;
}
else{
return newString;
}
}
console.log(palindrome("eye"));
<file_sep>function confirmEnding(str, target) {
var sub;
sub = str.substr((str.length - target.length),target.length);
console.log(sub);
if(sub==target){
console.log('true');
return true;
}
else{
console.log('false');
return false;
}
}
confirmEnding("console", "e"); | b22e53c15c4a40a267d006c5ba454cc4cc6dae81 | [
"JavaScript",
"HTML"
] | 12 | JavaScript | akashverma07/tut-test | 975772da10df276d18049e07e2b797434f85e059 | 2ad511fc5c0d2ff354afc86a05b261fce7e85231 |
refs/heads/develop | <file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.core;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.MatcherAssert.assertThat;
import java.io.File;
import java.io.IOException;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.rauschig.jarchivelib.Archiver;
import org.rauschig.jarchivelib.ArchiverFactory;
import org.semux.config.Config;
import org.semux.config.Constants;
import org.semux.config.TestnetConfig;
import org.semux.db.DatabaseFactory;
import org.semux.db.LeveldbDatabase;
import org.semux.net.filter.SemuxIpFilterLoaderTest;
public class BlockchainImplMigrationTest {

    @Rule
    public TemporaryFolder temporaryFolder = new TemporaryFolder();

    /**
     * Opens a bundled version-0 block database and verifies that loading it
     * through {@link BlockchainImpl} upgrades the schema to the current
     * {@code DATABASE_VERSION} while keeping every stored block readable.
     */
    @Test
    public void testMigrationBlockDbVersion001() throws IOException {
        // unpack the archived version-0 database into a scratch directory
        File tarball = new File(
                SemuxIpFilterLoaderTest.class.getResource("/database/database-v0.tgz").getFile());
        ArchiverFactory.createArchiver("tar", "gz").extract(tarball, temporaryFolder.getRoot());

        // opening the database through BlockchainImpl is what triggers the migration
        DatabaseFactory dbFactory = new LeveldbDatabase.LevelDbFactory(new File(temporaryFolder.getRoot(), "database"));
        Config config = new TestnetConfig(Constants.DEFAULT_DATA_DIR);
        BlockchainImpl blockchain = new BlockchainImpl(config, dbFactory);

        // schema version must now be current and the whole chain must stay intact
        assertThat("getDatabaseVersion", blockchain.getDatabaseVersion(), equalTo(BlockchainImpl.DATABASE_VERSION));
        assertThat("getLatestBlockNumber", blockchain.getLatestBlockNumber(), equalTo(29L));
        for (int i = 0, latest = (int) blockchain.getLatestBlockNumber(); i <= latest; i++) {
            assertThat("getBlock(" + i + ")", blockchain.getBlock(i), is(notNullValue()));
        }
    }
}
<file_sep>Welcome to the semux wiki!
* [Block Rewards](./Block-Rewards.md)
* [Bug Bounty Program](./Bug-Bounty-Program.md)
* [Configurations](./Configurations.md)
* [Delegates and Validators](./Delegates-and-Validators.md)
* [RESTful API](./RESTful-API.md)
* [Semux BFT Consensus](./Semux-BFT-Consensus.md)
* [Semux Virtual Machine](./Semux-Virtual-Machine.md)
* [Testnet](./Testnet.md)<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.consensus;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.Mockito.doCallRealMethod;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.LongStream;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.semux.core.Blockchain;
import org.semux.net.Channel;
import org.semux.net.ChannelManager;
import org.semux.net.Peer;
/**
 * Parameterized unit test for {@code SemuxBft#onNewHeight(long)}.
 * <p>
 * Each test vector announces a new height to a mocked BFT instance and checks
 * whether it starts syncing, and to which target. NOTE(review): the expected
 * targets in the vectors suggest the sync target is one past the height
 * reported by the validator at roughly the two-thirds position of the active
 * validator set (so a single lying validator cannot trigger a bogus sync) —
 * confirm against the real {@code SemuxBft#onNewHeight} implementation.
 */
@RunWith(Parameterized.class)
public class SemuxBftOnNewHeightTest {

    /**
     * Test vectors. Column layout of each {@code Object[]} row:
     * <ol>
     * <li>announced new height</li>
     * <li>local height of the BFT instance under test</li>
     * <li>active validator channels, each mocked to report a latest block number</li>
     * <li>expected sync target, or {@code null} when no sync must be started</li>
     * </ol>
     */
    @Parameterized.Parameters
    public static Collection<Object[]> data() {
        return Arrays.asList(new Object[][] {
                // 0 validator
                { 10L, 0L, Collections.emptyList(), null },
                // 1 validator
                { 10L, 0L, Collections.singletonList(mockValidator(10L)), null },
                // 2 validators
                { 100L, 0L, Arrays.asList(
                        mockValidator(100L),
                        mockValidator(100L)), 101L },
                { 100L, 99L, Arrays.asList(
                        mockValidator(100L),
                        mockValidator(100L)), 101L },
                // 2 validators, same height
                { 100L, 100L, Arrays.asList(
                        mockValidator(100L),
                        mockValidator(100L)), null },
                // 2 validators, greater height
                { 100L, 101L, Arrays.asList(
                        mockValidator(10L),
                        mockValidator(100L)), null },
                // 3 validators
                { 1000L, 0L, Arrays.asList(
                        mockValidator(10L),
                        mockValidator(100L),
                        mockValidator(1000L)), 101L },
                // 4 validators
                { 10000L, 0L, Arrays.asList(
                        mockValidator(10L),
                        mockValidator(100L),
                        mockValidator(1000L),
                        mockValidator(10000L)), 101L },
                // 5 validators
                { 1000000L, 0L, Arrays.asList(
                        mockValidator(10L),
                        mockValidator(100L),
                        mockValidator(1000L),
                        mockValidator(10000L),
                        mockValidator(100000L)), 1001L },
                // Malicious validator with large height
                { Long.MAX_VALUE, 0L, Arrays.asList(
                        mockValidator(10L),
                        mockValidator(Long.MAX_VALUE - 1),
                        mockValidator(100L)), 101L },
                // 100 validators with height from 1 ~ 100
                { 100L, 0L, LongStream.range(1L, 100L).mapToObj(SemuxBftOnNewHeightTest::mockValidator)
                        .collect(Collectors.toList()), 67L }
        });
    }

    // parameters injected by the Parameterized runner, one row per test instance
    private Long newHeight; // announced new height
    private Long height; // local chain height of the BFT instance
    private List<Channel> activeValidators; // mocked validator channels
    private Long target; // expected sync target; null = expect no sync call

    public SemuxBftOnNewHeightTest(long newHeight, long height, List<Channel> activeValidators, Long target) {
        this.newHeight = newHeight;
        this.height = height;
        this.activeValidators = activeValidators;
        this.target = target;
    }

    /**
     * Drives {@code onNewHeight} on a mocked BFT instance and verifies the
     * sync decision against the expected target of the current vector.
     */
    @Test
    public void testOnLargeNewHeight() {
        // mock consensus: only onNewHeight runs for real, every collaborator is stubbed
        SemuxBft semuxBFT = mock(SemuxBft.class);
        semuxBFT.chain = mock(Blockchain.class);
        semuxBFT.height = height;
        semuxBFT.validators = new ArrayList<>();
        semuxBFT.channelMgr = mock(ChannelManager.class);
        when(semuxBFT.channelMgr.getActiveChannels(any())).thenReturn(activeValidators);
        doCallRealMethod().when(semuxBFT).onNewHeight(anyLong());

        // exercise the real onNewHeight implementation
        semuxBFT.onNewHeight(newHeight);

        if (target != null) {
            verify(semuxBFT).sync(target);
        } else {
            verify(semuxBFT, never()).sync(anyLong());
        }
    }

    /**
     * Builds a mock channel whose remote peer reports the given latest block
     * number.
     */
    private static Channel mockValidator(long latestBlockNumber) {
        Channel mockChannel = mock(Channel.class);
        Peer mockPeer = mock(Peer.class);
        when(mockPeer.getLatestBlockNumber()).thenReturn(latestBlockNumber);
        when(mockChannel.getRemotePeer()).thenReturn(mockPeer);
        return mockChannel;
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.consensus;
import static org.semux.core.Amount.ZERO;
import static org.semux.core.Amount.sum;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.math.MathContext;
import java.net.InetSocketAddress;
import java.time.Duration;
import java.time.Instant;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Random;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
import java.util.stream.Collectors;
import org.apache.commons.lang3.tuple.Pair;
import org.semux.Kernel;
import org.semux.config.Config;
import org.semux.core.Amount;
import org.semux.core.Block;
import org.semux.core.BlockHeader;
import org.semux.core.Blockchain;
import org.semux.core.SyncManager;
import org.semux.core.Transaction;
import org.semux.core.TransactionExecutor;
import org.semux.core.TransactionResult;
import org.semux.core.state.AccountState;
import org.semux.core.state.DelegateState;
import org.semux.crypto.Hex;
import org.semux.crypto.Key;
import org.semux.net.Channel;
import org.semux.net.ChannelManager;
import org.semux.net.msg.Message;
import org.semux.net.msg.ReasonCode;
import org.semux.net.msg.consensus.BlockMessage;
import org.semux.net.msg.consensus.GetBlockMessage;
import org.semux.util.ByteArray;
import org.semux.util.TimeUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Syncing manager downloads blocks from the network and imports them into the
 * blockchain.
 * <p>
 * The {@link #download()} and the {@link #process()} methods are not
 * synchronized and must be executed by a single thread at any time.
 * <p>
 * The download/unfinished/pending queues are protected by lock.
 */
public class SemuxSync implements SyncManager {

    private static final Logger logger = LoggerFactory.getLogger(SemuxSync.class);

    // Names scheduler threads "sync-0", "sync-1", ... for easier debugging.
    private static final ThreadFactory factory = new ThreadFactory() {

        private AtomicInteger cnt = new AtomicInteger(0);

        @Override
        public Thread newThread(Runnable r) {
            return new Thread(r, "sync-" + cnt.getAndIncrement());
        }
    };

    // Two single-threaded schedulers: download() and process() are not
    // synchronized and each must run on exactly one thread (see class javadoc).
    private static final ScheduledExecutorService timer1 = Executors.newSingleThreadScheduledExecutor(factory);
    private static final ScheduledExecutorService timer2 = Executors.newSingleThreadScheduledExecutor(factory);

    // How long to wait for a requested block before re-queueing the request.
    private static final long MAX_DOWNLOAD_TIME = 10L * 1000L; // 10 seconds

    // Maximum number of in-flight block requests (size of toComplete).
    private static final int MAX_UNFINISHED_JOBS = 16;

    // Maximum size of the toDownload queue.
    private static final int MAX_QUEUED_BLOCKS = 8192;

    // Maximum number of downloaded-but-not-yet-imported blocks (toProcess).
    private static final int MAX_PENDING_BLOCKS = 512;

    private static final Random random = new Random();

    private Kernel kernel;
    private Config config;

    private Blockchain chain;
    private ChannelManager channelMgr;

    // task queues, all guarded by `lock`
    private AtomicLong latestQueuedTask = new AtomicLong(); // highest block number ever queued
    private TreeSet<Long> toDownload = new TreeSet<>(); // block numbers waiting to be requested
    private Map<Long, Long> toComplete = new HashMap<>(); // block number -> request time (ms), i.e. in-flight requests
    private TreeSet<Pair<Block, Channel>> toProcess = new TreeSet<>( // downloaded blocks, ordered by block number
            Comparator.comparingLong(o -> o.getKey().getNumber()));
    private final Object lock = new Object();

    // current and target heights
    private AtomicLong begin = new AtomicLong();
    private AtomicLong current = new AtomicLong();
    private AtomicLong target = new AtomicLong();
    private Instant beginningInstant;

    // doubles as the wait/notify monitor for the start()/stop() handshake
    private final AtomicBoolean isRunning = new AtomicBoolean(false);

    /**
     * Creates a sync manager bound to the given kernel's blockchain and
     * channel manager.
     */
    public SemuxSync(Kernel kernel) {
        this.kernel = kernel;

        this.config = kernel.getConfig();

        this.chain = kernel.getBlockchain();
        this.channelMgr = kernel.getChannelManager();
    }

    /**
     * Starts syncing towards {@code targetHeight} (exclusive) and blocks the
     * calling thread until the sync completes, is stopped, or is interrupted.
     * No-op if the manager is already running.
     */
    @Override
    public void start(long targetHeight) {
        if (isRunning.compareAndSet(false, true)) {
            beginningInstant = Instant.now();

            logger.info("Syncing started, best known block = {}", targetHeight - 1);

            // [1] set up queues
            synchronized (lock) {
                toDownload.clear();
                toComplete.clear();
                toProcess.clear();

                begin.set(chain.getLatestBlockNumber() + 1);
                current.set(chain.getLatestBlockNumber() + 1);
                target.set(targetHeight);
                latestQueuedTask.set(chain.getLatestBlockNumber());
                growToDownloadQueue();
            }

            // [2] start tasks
            ScheduledFuture<?> download = timer1.scheduleAtFixedRate(this::download, 0, 5, TimeUnit.MILLISECONDS);
            ScheduledFuture<?> process = timer2.scheduleAtFixedRate(this::process, 0, 5, TimeUnit.MILLISECONDS);

            // [3] wait until the sync is done; stop() notifies on isRunning
            while (isRunning.get()) {
                synchronized (isRunning) {
                    try {
                        isRunning.wait(1000);
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt();
                        logger.info("Sync manager got interrupted");
                        break;
                    }
                }
            }

            // [4] cancel tasks; process is cancelled without interruption,
            // presumably so an in-progress block import can finish cleanly
            download.cancel(true);
            process.cancel(false);

            Instant end = Instant.now();
            logger.info("Syncing finished, took {}", TimeUtil.formatDuration(Duration.between(beginningInstant, end)));
        }
    }

    /**
     * Signals the loop in {@link #start(long)} to exit by flipping the flag
     * and notifying the waiting thread. No-op if not running.
     */
    @Override
    public void stop() {
        if (isRunning.compareAndSet(true, false)) {
            synchronized (isRunning) {
                isRunning.notifyAll();
            }
        }
    }

    @Override
    public boolean isRunning() {
        return isRunning.get();
    }

    /**
     * Handles an incoming network message. Only BLOCK messages are consumed:
     * the block moves from the in-flight map into the processing queue.
     */
    @Override
    public void onMessage(Channel channel, Message msg) {
        if (!isRunning()) {
            return;
        }

        switch (msg.getCode()) {
        case BLOCK: {
            BlockMessage blockMsg = (BlockMessage) msg;
            Block block = blockMsg.getBlock();
            synchronized (lock) {
                if (toDownload.remove(block.getNumber())) {
                    growToDownloadQueue();
                }
                toComplete.remove(block.getNumber());
                toProcess.add(Pair.of(block, channel));
            }
            break;
        }
        case BLOCK_HEADER: {
            // TODO implement block header
            break;
        }
        default: {
            break;
        }
        }
    }

    /**
     * One scheduler tick: re-queues expired in-flight requests, then requests
     * the lowest pending block number from a randomly chosen idle peer that
     * has it. Runs every 5 ms while syncing; see start().
     */
    private void download() {
        if (!isRunning()) {
            return;
        }

        synchronized (lock) {
            // filter all expired tasks
            long now = System.currentTimeMillis();
            Iterator<Entry<Long, Long>> itr = toComplete.entrySet().iterator();
            while (itr.hasNext()) {
                Entry<Long, Long> entry = itr.next();

                if (entry.getValue() + MAX_DOWNLOAD_TIME < now) {
                    logger.debug("Downloading of block #{} has expired", entry.getKey());
                    toDownload.add(entry.getKey());
                    itr.remove();
                }
            }

            // quit if too many unfinished jobs
            if (toComplete.size() > MAX_UNFINISHED_JOBS) {
                logger.trace("Max unfinished jobs reached");
                return;
            }

            // quit if no more tasks
            if (toDownload.isEmpty()) {
                return;
            }
            Long task = toDownload.first();

            // quit if too many pending blocks, unless this task is below the
            // lowest pending block number (downloading it would unblock process())
            if (toProcess.size() > MAX_PENDING_BLOCKS && task > toProcess.first().getKey().getNumber()) {
                logger.trace("Pending block queue is full");
                return;
            }

            // get idle channels whose remote peer is at or beyond the wanted height
            List<Channel> channels = channelMgr.getIdleChannels().stream()
                    .filter(channel -> channel.getRemotePeer().getLatestBlockNumber() >= task)
                    .collect(Collectors.toList());
            logger.trace("Idle peers = {}", channels.size());

            // quit if no idle channels.
            if (channels.isEmpty()) {
                return;
            }

            // pick a random channel
            Channel c = channels.get(random.nextInt(channels.size()));

            // request the block
            if (c.getRemotePeer().getLatestBlockNumber() >= task) {
                logger.debug("Request block #{} from channel = {}", task, c.getId());
                c.getMessageQueue().sendMessage(new GetBlockMessage(task));

                if (toDownload.remove(task)) {
                    growToDownloadQueue();
                }
                toComplete.put(task, System.currentTimeMillis());
            }
        }
    }

    /**
     * Queue new tasks sequentially starting from
     * ${@link SemuxSync#latestQueuedTask} until the size of
     * ${@link SemuxSync#toDownload} queue is greater than or equal to
     * ${@value MAX_QUEUED_BLOCKS}. Must be called while holding {@code lock}.
     */
    private void growToDownloadQueue() {
        // To avoid overhead, this method doesn't add new tasks before the queue is less
        // than half-filled
        if (toDownload.size() >= MAX_QUEUED_BLOCKS / 2) {
            return;
        }

        for (long task = latestQueuedTask.get() + 1; //
                task < target.get() && toDownload.size() < MAX_QUEUED_BLOCKS; //
                task++) {
            // monotonically advance the high-water mark
            latestQueuedTask.accumulateAndGet(task, (prev, next) -> next > prev ? next : prev);
            if (!chain.hasBlock(task)) {
                toDownload.add(task);
            }
        }
    }

    /**
     * One scheduler tick: stops the sync once the target is reached; otherwise
     * picks the block at height latest+1 from the processing queue (if
     * downloaded) and tries to validate and import it. An invalid block is
     * re-queued for download and the sending peer is disconnected.
     */
    private void process() {
        if (!isRunning()) {
            return;
        }

        long latest = chain.getLatestBlockNumber();
        if (latest + 1 >= target.get()) {
            stop();
            return; // This is important because stop() only notify
        }

        Pair<Block, Channel> pair = null;
        synchronized (lock) {
            // drop blocks already in the chain; grab the exact successor if present
            Iterator<Pair<Block, Channel>> iterator = toProcess.iterator();
            while (iterator.hasNext()) {
                Pair<Block, Channel> p = iterator.next();

                if (p.getKey().getNumber() <= latest) {
                    iterator.remove();
                } else if (p.getKey().getNumber() == latest + 1) {
                    iterator.remove();
                    pair = p;
                    break;
                } else {
                    // queue is ordered by number: the successor isn't here yet
                    break;
                }
            }
        }

        if (pair != null) {
            logger.info("{}", pair.getKey());

            if (validateApplyBlock(pair.getKey())) {
                synchronized (lock) {
                    if (toDownload.remove(pair.getKey().getNumber())) {
                        growToDownloadQueue();
                    }
                    toComplete.remove(pair.getKey().getNumber());
                }
            } else {
                InetSocketAddress a = pair.getValue().getRemoteAddress();
                logger.info("Invalid block from {}:{}", a.getAddress().getHostAddress(), a.getPort());

                synchronized (lock) {
                    // re-queue the block number for download from another peer
                    toDownload.add(pair.getKey().getNumber());
                    toComplete.remove(pair.getKey().getNumber());
                }

                // disconnect if the peer sends us invalid block
                pair.getValue().getMessageQueue().disconnect(ReasonCode.BAD_PEER);
            }
        }
    }

    /**
     * Check if a block is valid, and apply to the chain if yes.
     *
     * @param block
     *            the block to validate and import
     * @return true if the block was valid and applied, otherwise false
     */
    protected boolean validateApplyBlock(Block block) {
        AccountState as = chain.getAccountState().track();
        DelegateState ds = chain.getDelegateState().track();

        return validateBlock(block, as, ds) && applyBlock(block, as, ds);
    }

    /**
     * Validates a block against the chain tip: header, transactions (incl.
     * size limit and duplicate-hash check), execution results, and votes.
     * Transaction execution mutates the given state snapshots.
     */
    protected boolean validateBlock(Block block, AccountState asSnapshot, DelegateState dsSnapshot) {
        BlockHeader header = block.getHeader();
        List<Transaction> transactions = block.getTransactions();

        // [1] check block header
        Block latest = chain.getLatestBlock();
        if (!Block.validateHeader(latest.getHeader(), header)) {
            logger.debug("Invalid block header");
            return false;
        }

        // [2] check transactions and results
        if (!Block.validateTransactions(header, transactions, config.network())
                || transactions.stream().mapToInt(Transaction::size).sum() > config.maxBlockTransactionsSize()) {
            logger.debug("Invalid block transactions");
            return false;
        }
        if (!Block.validateResults(header, block.getResults())) {
            logger.debug("Invalid results");
            return false;
        }
        // reject transactions whose hash already exists in the chain
        if (transactions.stream().anyMatch(tx -> chain.hasTransaction(tx.getHash()))) {
            logger.warn("Duplicated transaction hash is not allowed");
            return false;
        }

        TransactionExecutor transactionExecutor = new TransactionExecutor(config);

        // [3] evaluate transactions
        List<TransactionResult> results = transactionExecutor.execute(transactions, asSnapshot, dsSnapshot);
        if (!Block.validateResults(header, results)) {
            logger.debug("Invalid transactions");
            return false;
        }

        // [4] evaluate votes
        return validateBlockVotes(block);
    }

    /**
     * Validates the PRECOMMIT votes attached to a block: every vote signature
     * must verify against the expected vote payload and come from a known
     * validator, and the number of distinct voters must reach 2/3 of the
     * validator set.
     */
    protected boolean validateBlockVotes(Block block) {
        Set<String> validators = new HashSet<>(chain.getValidators());
        int twoThirds = (int) Math.ceil(validators.size() * 2.0 / 3.0);

        Vote vote = new Vote(VoteType.PRECOMMIT, Vote.VALUE_APPROVE, block.getNumber(), block.getView(),
                block.getHash());
        byte[] encoded = vote.getEncoded();

        // check validity of votes
        if (!block.getVotes().stream()
                .allMatch(sig -> Key.verify(encoded, sig) && validators.contains(Hex.encode(sig.getAddress())))) {
            logger.debug("Block votes are invalid");
            return false;
        }

        // at least two thirds voters; distinctness is keyed on sig.getA(),
        // presumably the signer's public key — confirm against Key.Signature
        if (block.getVotes().stream()
                .map(sig -> new ByteArray(sig.getA()))
                .collect(Collectors.toSet()).size() < twoThirds) {
            logger.debug("Not enough votes, needs 2/3+");
            return false;
        }

        return true;
    }

    /**
     * Applies a validated block: credits the block reward plus transaction
     * fees to the coinbase on the snapshots, commits the snapshots, then
     * flushes state and appends the block under the kernel's state write
     * lock. Always returns true.
     */
    protected boolean applyBlock(Block block, AccountState asSnapshot, DelegateState dsSnapshot) {
        // [5] apply block reward and tx fees
        Amount txsReward = block.getTransactions().stream().map(Transaction::getFee).reduce(ZERO, Amount::sum);
        Amount reward = sum(config.getBlockReward(block.getNumber()), txsReward);

        if (reward.gt0()) {
            asSnapshot.adjustAvailable(block.getCoinbase(), reward);
        }

        // [6] commit the updates
        asSnapshot.commit();
        dsSnapshot.commit();

        WriteLock writeLock = kernel.getStateLock().writeLock();
        writeLock.lock();
        try {
            // [7] flush state to disk
            chain.getAccountState().commit();
            chain.getDelegateState().commit();

            // [8] add block to chain
            chain.addBlock(block);
        } finally {
            writeLock.unlock();
        }

        current.set(block.getNumber() + 1);

        return true;
    }

    /**
     * Returns a snapshot of the current sync progress. Safe to call before
     * start(): the duration falls back to zero when no sync has begun.
     */
    @Override
    public SemuxSyncProgress getProgress() {
        return new SemuxSyncProgress(
                begin.get(),
                current.get(),
                target.get(),
                Duration.between(beginningInstant != null ? beginningInstant : Instant.now(), Instant.now()));
    }

    /**
     * Immutable snapshot of sync progress, with a simple linear time
     * estimation based on the average import speed so far.
     */
    public static class SemuxSyncProgress implements SyncManager.Progress {

        final long beginHeight;
        final long currentHeight;
        final long targetHeight;
        final Duration duration;

        public SemuxSyncProgress(long beginHeight, long currentHeight, long targetHeight, Duration duration) {
            this.beginHeight = beginHeight;
            this.currentHeight = currentHeight;
            this.targetHeight = targetHeight;
            this.duration = duration;
        }

        @Override
        public long getBeginHeight() {
            return beginHeight;
        }

        @Override
        public long getCurrentHeight() {
            return currentHeight;
        }

        @Override
        public long getTargetHeight() {
            return targetHeight;
        }

        /**
         * Estimates the remaining time as (remaining blocks) x (average
         * milliseconds per block); returns null when no speed data exists yet.
         */
        @Override
        public Duration getSyncEstimation() {
            Long speed = getSpeed();
            if (speed == null || speed == 0) {
                return null;
            }

            return Duration.ofMillis(BigInteger.valueOf(getTargetHeight())
                    .subtract(BigInteger.valueOf(getCurrentHeight()))
                    .multiply(BigInteger.valueOf(speed))
                    .longValue());
        }

        /**
         * Average milliseconds spent per imported block, or null when no
         * block has been imported yet or no time has elapsed.
         */
        private Long getSpeed() {
            Long downloadedBlocks = currentHeight - beginHeight;
            if (downloadedBlocks <= 0 || duration.toMillis() == 0) {
                return null;
            }

            return BigDecimal.valueOf(duration.toMillis())
                    .divide(BigDecimal.valueOf(downloadedBlocks), MathContext.DECIMAL64)
                    .round(MathContext.DECIMAL64)
                    .longValue();
        }
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.core;
import java.time.Duration;
import org.semux.net.Channel;
import org.semux.net.msg.Message;
public interface SyncManager {

    /**
     * Starts sync manager, and sync blocks in [height, targetHeight). Blocks
     * the calling thread until syncing finishes or is stopped.
     *
     * @param targetHeight
     *            the target height, exclusive
     */
    void start(long targetHeight);

    /**
     * Stops sync manager.
     */
    void stop();

    /**
     * Returns if this sync manager is running.
     *
     * @return true if the sync manager is running, otherwise false
     */
    boolean isRunning();

    /**
     * Callback when a message is received from network.
     *
     * @param channel
     *            the channel where the message is coming from
     * @param msg
     *            the message
     */
    void onMessage(Channel channel, Message msg);

    /**
     * Returns current synchronisation progress.
     *
     * @return a ${@link Progress} object
     */
    Progress getProgress();

    /**
     * This interface represents synchronisation progress
     */
    interface Progress {

        /**
         * @return the beginning height of this sync process.
         */
        long getBeginHeight();

        /**
         * @return the current height of sync process.
         */
        long getCurrentHeight();

        /**
         * @return the target height of sync process.
         */
        long getTargetHeight();

        /**
         * @return the estimated time to complete this sync process, or
         *         {@code null} if no estimation is available yet.
         */
        Duration getSyncEstimation();
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.consensus;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.semux.util.SimpleDecoder;
import org.semux.util.SimpleEncoder;
/**
 * A consensus proof for a given (height, view): the list of votes (presumably
 * precommits — confirm against consensus usage) that justifies the decision.
 */
public class Proof {

    private long height;
    private int view;
    private List<Vote> votes;

    /**
     * Creates a proof backed by the given votes.
     */
    public Proof(long height, int view, List<Vote> votes) {
        this.height = height;
        this.view = view;
        this.votes = votes;
    }

    /**
     * Creates a proof that carries no votes.
     */
    public Proof(long height, int view) {
        this(height, view, Collections.emptyList());
    }

    public long getHeight() {
        return this.height;
    }

    public int getView() {
        return this.view;
    }

    public List<Vote> getVotes() {
        return this.votes;
    }

    /**
     * Serializes this proof as: height, view, vote count, then each vote's
     * own encoding.
     */
    public byte[] toBytes() {
        SimpleEncoder encoder = new SimpleEncoder();
        encoder.writeLong(this.height);
        encoder.writeInt(this.view);
        encoder.writeInt(this.votes.size());
        for (Vote vote : this.votes) {
            encoder.writeBytes(vote.toBytes());
        }
        return encoder.toBytes();
    }

    /**
     * Parses a proof from a byte array produced by {@link #toBytes()}.
     */
    public static Proof fromBytes(byte[] bytes) {
        SimpleDecoder decoder = new SimpleDecoder(bytes);

        long parsedHeight = decoder.readLong();
        int parsedView = decoder.readInt();

        int count = decoder.readInt();
        List<Vote> parsedVotes = new ArrayList<>();
        while (count-- > 0) {
            parsedVotes.add(Vote.fromBytes(decoder.readBytes()));
        }

        return new Proof(parsedHeight, parsedView, parsedVotes);
    }

    @Override
    public String toString() {
        return "Proof [height=" + height + ", view=" + view + ", # votes=" + votes.size() + "]";
    }
}<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.core.state;
import static org.semux.core.Amount.ZERO;
import java.util.Arrays;
import org.semux.core.Amount;
import org.semux.crypto.Hex;
import org.semux.util.Bytes;
import org.semux.util.SimpleDecoder;
import org.semux.util.SimpleEncoder;
/**
 * A registered delegate: an address, a registered name, the registration
 * point and the accumulated vote amount.
 */
public class Delegate {

    protected byte[] address;
    protected byte[] name;
    protected long registeredAt;
    protected Amount votes = ZERO;

    /**
     * Create a delegate instance.
     *
     * @param address
     *            the delegate's address
     * @param name
     *            the delegate's registered name, as raw bytes
     * @param registeredAt
     *            registration point (presumably a block number or timestamp
     *            — confirm against callers)
     * @param votes
     *            the current vote total
     */
    public Delegate(byte[] address, byte[] name, long registeredAt, Amount votes) {
        this.address = address;
        this.name = name;
        this.registeredAt = registeredAt;
        this.votes = votes;
    }

    public byte[] getAddress() {
        return this.address;
    }

    /**
     * Returns the address encoded as a hex string.
     */
    public String getAddressString() {
        return Hex.encode(getAddress());
    }

    public byte[] getName() {
        return this.name;
    }

    /**
     * Returns the name bytes decoded into a string.
     */
    public String getNameString() {
        return Bytes.toString(this.name);
    }

    public long getRegisteredAt() {
        return this.registeredAt;
    }

    public Amount getVotes() {
        return this.votes;
    }

    // package-private on purpose; presumably only the delegate state adjusts
    // vote totals — confirm against the state package
    void setVotes(Amount votes) {
        this.votes = votes;
    }

    /**
     * Serializes this delegate object into byte array (name, registeredAt,
     * votes). The address is not serialized; it is passed back separately to
     * {@link #fromBytes(byte[], byte[])}.
     *
     * @return the serialized bytes
     */
    public byte[] toBytes() {
        SimpleEncoder encoder = new SimpleEncoder();
        encoder.writeBytes(this.name);
        encoder.writeLong(this.registeredAt);
        encoder.writeAmount(this.votes);
        return encoder.toBytes();
    }

    /**
     * Parses a delegate from a byte array.
     *
     * @param address
     *            the delegate's address, stored outside the serialized form
     * @param bytes
     *            bytes produced by {@link #toBytes()}
     * @return the reconstructed delegate
     */
    public static Delegate fromBytes(byte[] address, byte[] bytes) {
        SimpleDecoder decoder = new SimpleDecoder(bytes);

        byte[] parsedName = decoder.readBytes();
        long parsedRegisteredAt = decoder.readLong();
        Amount parsedVotes = decoder.readAmount();

        return new Delegate(address, parsedName, parsedRegisteredAt, parsedVotes);
    }

    @Override
    public String toString() {
        return "Delegate [address=" + Hex.encode(address) + ", name=" + Arrays.toString(name) + ", registeredAt="
                + registeredAt + ", votes=" + votes.getNano() + "]";
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.consensus;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.semux.consensus.ValidatorActivatedFork.UNIFORM_DISTRIBUTION;
import static org.semux.core.Amount.Unit.SEM;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Random;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.junit.MockitoJUnitRunner;
import org.semux.config.Constants;
import org.semux.config.MainnetConfig;
import org.semux.core.Block;
import org.semux.core.Blockchain;
import org.semux.core.BlockchainImpl;
import org.semux.core.PendingManager;
import org.semux.core.Transaction;
import org.semux.core.TransactionResult;
import org.semux.core.TransactionType;
import org.semux.crypto.Key;
import org.semux.rules.KernelRule;
import org.semux.rules.TemporaryDatabaseRule;
import org.semux.util.Bytes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@RunWith(MockitoJUnitRunner.class)
public class SemuxBftTest {

    private static final Logger logger = LoggerFactory.getLogger(SemuxBftTest.class);

    @Rule
    public KernelRule kernelRule = new KernelRule(51610, 51710);

    @Rule
    public TemporaryDatabaseRule temporaryDBRule = new TemporaryDatabaseRule();

    /**
     * Checks the consecutive-primary probability with the UNIFORM_DISTRIBUTION
     * fork NOT activated (legacy selection path).
     */
    @Test
    public void testIsPrimaryH256() {
        // 99 synthetic validator ids: "v1" .. "v99"
        List<String> validators = IntStream.range(1, 100).boxed().map(i -> String.format("v%d", i))
                .collect(Collectors.toList());

        // mock the BFT engine but call the real isPrimary implementation
        SemuxBft bft = mock(SemuxBft.class);
        bft.config = new MainnetConfig(Constants.DEFAULT_DATA_DIR);
        bft.validators = validators;
        bft.chain = mock(Blockchain.class);
        when(bft.chain.forkActivated(anyLong(), eq(UNIFORM_DISTRIBUTION))).thenReturn(false);
        when(bft.isPrimary(anyLong(), anyInt(), anyString())).thenCallRealMethod();
        testIsPrimaryConsecutiveValidatorProbability(bft);
    }

    /**
     * Same check as above, but with the UNIFORM_DISTRIBUTION fork activated.
     */
    @Test
    public void testIsPrimaryUniformDist() {
        List<String> validators = IntStream.range(1, 100).boxed().map(i -> String.format("v%d", i))
                .collect(Collectors.toList());

        SemuxBft bft = mock(SemuxBft.class);
        bft.config = new MainnetConfig(Constants.DEFAULT_DATA_DIR);
        bft.validators = validators;
        bft.chain = mock(Blockchain.class);
        when(bft.chain.forkActivated(anyLong(), eq(UNIFORM_DISTRIBUTION))).thenReturn(true);
        when(bft.isPrimary(anyLong(), anyInt(), anyString())).thenCallRealMethod();
        testIsPrimaryConsecutiveValidatorProbability(bft);
    }

    /**
     * Simulates 1000 blocks (with a 5% chance of a view change per block),
     * finds the selected primary for each, and asserts that the rate of the
     * same validator being chosen twice in a row is close to the uniform
     * expectation 1/|validators| (tolerance 0.05).
     */
    private void testIsPrimaryConsecutiveValidatorProbability(SemuxBft bft) {
        int blocks = 1000;
        int repeat = 0;
        int last = -1;
        Random r = new Random(System.nanoTime());
        for (int i = 0; i < blocks; i++) {
            int view = r.nextDouble() < 0.05 ? 1 : 0;
            for (int j = 0; j < bft.validators.size(); j++) {
                if (bft.isPrimary(i, view, bft.validators.get(j))) {
                    if (j == last) {
                        repeat++;
                    }
                    last = j;
                }
            }
        }
        logger.info("Consecutive validator probability: {}/{}", repeat, blocks);
        assertEquals(1.0 / bft.validators.size(), (double) repeat / blocks, 0.05);
    }

    /**
     * A block containing a transaction whose hash already exists in the chain
     * must be rejected (cf. BIP-30 style duplicate-hash rule).
     *
     * https://github.com/bitcoin/bips/blob/master/bip-0030.mediawiki
     */
    @Test
    public void testDuplicatedTransaction() {
        // mock blockchain with a single transaction
        Key to = new Key();
        Key from1 = new Key();
        long time = System.currentTimeMillis();
        Transaction tx1 = createTransaction(to, from1, time, 0);
        kernelRule.getKernel().setBlockchain(new BlockchainImpl(kernelRule.getKernel().getConfig(), temporaryDBRule));
        kernelRule.getKernel().getBlockchain().getAccountState().adjustAvailable(from1.toAddress(), SEM.of(1000));
        Block block1 = kernelRule.createBlock(Collections.singletonList(tx1));
        kernelRule.getKernel().getBlockchain().addBlock(block1);
        SemuxBft semuxBFT = new SemuxBft(kernelRule.getKernel());

        // create a tx with the same hash with tx1 from a different signer in the second
        // block
        Key from2 = new Key();
        kernelRule.getKernel().getBlockchain().getAccountState().adjustAvailable(from2.toAddress(), SEM.of(1000));
        Transaction tx2 = createTransaction(to, from2, time, 0);
        Block block2 = kernelRule.createBlock(Collections.singletonList(tx2));

        // this test case is valid if and only if tx1 and tx2 have the same tx hash
        assertTrue(Arrays.equals(tx1.getHash(), tx2.getHash()));

        // the block should be rejected because of the duplicated tx
        assertFalse(semuxBFT.validateBlock(block2.getHeader(), block2.getTransactions()));
    }

    /**
     * Transactions already validated by the pending manager should be filtered
     * out of getUnvalidatedTransactions; unknown or invalid ones should remain.
     */
    @Test
    public void testFilterPendingTransactions() {
        Key to = new Key();
        Key from = new Key();
        long time = System.currentTimeMillis();
        Transaction tx1 = createTransaction(to, from, time, 0);
        Transaction tx2 = createTransaction(to, from, time, 1);
        kernelRule.getKernel().setBlockchain(new BlockchainImpl(kernelRule.getKernel().getConfig(), temporaryDBRule));

        // pending manager has only tx1 (validated)
        PendingManager.PendingTransaction pending = new PendingManager.PendingTransaction(tx1,
                new TransactionResult(true));
        when(kernelRule.getKernel().getPendingManager().getPendingTransactions())
                .thenReturn(Collections.singletonList(pending));
        SemuxBft semuxBFT = new SemuxBft(kernelRule.getKernel());

        // tx1 should filter out
        assertTrue(semuxBFT.getUnvalidatedTransactions(Collections.singletonList(tx1)).isEmpty());

        // other transactions should remain
        assertFalse(semuxBFT.getUnvalidatedTransactions(Collections.singletonList(tx2)).isEmpty());

        // test that invalid pending are not filtered
        when(kernelRule.getKernel().getPendingManager().getPendingTransactions(anyInt()))
                .thenReturn(Collections.singletonList(new PendingManager.PendingTransaction(tx2,
                        new TransactionResult(false))));
        assertFalse(semuxBFT.getUnvalidatedTransactions(Collections.singletonList(tx2)).isEmpty());
    }

    /**
     * Builds a signed TRANSFER of 10 SEM with the minimum fee and empty data.
     * Note: the tx hash does not depend on the signer, which is what
     * testDuplicatedTransaction relies on.
     */
    private Transaction createTransaction(Key to, Key from, long time, long nonce) {
        return new Transaction(
                kernelRule.getKernel().getConfig().network(),
                TransactionType.TRANSFER,
                to.toAddress(),
                SEM.of(10),
                kernelRule.getKernel().getConfig().minTransactionFee(),
                nonce,
                time,
                Bytes.EMPTY_BYTES).sign(from);
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.gui.panel;
import static org.semux.core.Amount.sum;
import java.awt.Color;
import java.awt.Font;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.text.ParseException;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import javax.swing.ButtonGroup;
import javax.swing.GroupLayout;
import javax.swing.GroupLayout.Alignment;
import javax.swing.JButton;
import javax.swing.JComboBox;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JRadioButton;
import javax.swing.JTextField;
import javax.swing.LayoutStyle.ComponentPlacement;
import javax.swing.SwingConstants;
import javax.swing.border.LineBorder;
import org.semux.Kernel;
import org.semux.Network;
import org.semux.config.Config;
import org.semux.core.Amount;
import org.semux.core.PendingManager;
import org.semux.core.Transaction;
import org.semux.core.TransactionType;
import org.semux.crypto.CryptoException;
import org.semux.crypto.Hex;
import org.semux.crypto.Key;
import org.semux.gui.Action;
import org.semux.gui.SemuxGui;
import org.semux.gui.SwingUtil;
import org.semux.gui.model.WalletAccount;
import org.semux.gui.model.WalletModel;
import org.semux.message.GuiMessages;
import org.semux.util.Bytes;
import org.semux.util.exception.UnreachableException;
public class SendPanel extends JPanel implements ActionListener {
private static final long serialVersionUID = 1L;
private transient SemuxGui gui;
private transient WalletModel model;
private transient Kernel kernel;
private transient Config config;
private JComboBox<AccountItem> selectFrom;
private JTextField txtTo;
private JTextField txtAmount;
private JTextField txtFee;
private JTextField txtData;
private JRadioButton rdbtnText;
private JRadioButton rdbtnHex;
public SendPanel(SemuxGui gui, JFrame frame) {
this.gui = gui;
this.model = gui.getModel();
this.model.addListener(this);
this.kernel = gui.getKernel();
this.config = kernel.getConfig();
setBorder(new LineBorder(Color.LIGHT_GRAY));
JLabel lblFrom = new JLabel(GuiMessages.get("From") + ":");
lblFrom.setHorizontalAlignment(SwingConstants.RIGHT);
selectFrom = new JComboBox<>();
selectFrom.setFont(new Font(Font.MONOSPACED, Font.PLAIN, 13));
JLabel lblTo = new JLabel(GuiMessages.get("To") + ":");
lblTo.setHorizontalAlignment(SwingConstants.RIGHT);
txtTo = SwingUtil.textFieldWithCopyPastePopup();
txtTo.setName("txtTo");
txtTo.setColumns(24);
txtTo.setActionCommand(Action.SEND.name());
txtTo.addActionListener(this);
JLabel lblAmount = new JLabel(GuiMessages.get("Amount") + ":");
lblAmount.setHorizontalAlignment(SwingConstants.RIGHT);
txtAmount = SwingUtil.textFieldWithCopyPastePopup();
txtAmount.setName("txtAmount");
txtAmount.setColumns(10);
txtAmount.setActionCommand(Action.SEND.name());
txtAmount.addActionListener(this);
JLabel lblFee = new JLabel(GuiMessages.get("Fee") + ":");
lblFee.setHorizontalAlignment(SwingConstants.RIGHT);
lblFee.setToolTipText(GuiMessages.get("FeeTip", SwingUtil.formatAmount(config.minTransactionFee())));
txtFee = SwingUtil.textFieldWithCopyPastePopup();
txtFee.setName("txtFee");
txtFee.setColumns(10);
txtFee.setActionCommand(Action.SEND.name());
txtFee.addActionListener(this);
JLabel lblData = new JLabel(GuiMessages.get("Data") + ":");
lblData.setHorizontalAlignment(SwingConstants.RIGHT);
lblData.setToolTipText(GuiMessages.get("DataTip"));
txtData = SwingUtil.textFieldWithCopyPastePopup();
txtData.setName("txtData");
txtData.setColumns(10);
txtData.setActionCommand(Action.SEND.name());
txtData.addActionListener(this);
txtData.setToolTipText(GuiMessages.get("DataTip"));
JLabel lblSem1 = new JLabel("SEM");
JLabel lblSem2 = new JLabel("SEM");
JButton btnSend = new JButton(GuiMessages.get("Send"));
btnSend.setName("btnSend");
btnSend.addActionListener(this);
btnSend.setActionCommand(Action.SEND.name());
JButton btnClear = new JButton(GuiMessages.get("Clear"));
btnClear.setName("btnClear");
btnClear.addActionListener(this);
btnClear.setActionCommand(Action.CLEAR.name());
JButton btnAddressBook = new JButton(GuiMessages.get("AddressBook"));
btnAddressBook.setName("btnAddressBook");
btnAddressBook.addActionListener(this);
btnAddressBook.setActionCommand(Action.SHOW_ADDRESS_BOOK.name());
rdbtnText = new JRadioButton(GuiMessages.get("Text"));
rdbtnText.setSelected(true);
rdbtnHex = new JRadioButton(GuiMessages.get("Hex"));
ButtonGroup btnGroupDataType = new ButtonGroup();
btnGroupDataType.add(rdbtnText);
btnGroupDataType.add(rdbtnHex);
// @formatter:off
GroupLayout groupLayout = new GroupLayout(this);
groupLayout.setHorizontalGroup(
groupLayout.createParallelGroup(Alignment.LEADING)
.addGroup(groupLayout.createSequentialGroup()
.addGap(62)
.addGroup(groupLayout.createParallelGroup(Alignment.TRAILING)
.addComponent(lblFrom)
.addComponent(lblTo)
.addComponent(lblAmount)
.addComponent(lblFee)
.addComponent(lblData))
.addGap(18)
.addGroup(groupLayout.createParallelGroup(Alignment.LEADING)
.addGroup(groupLayout.createSequentialGroup()
.addComponent(btnClear)
.addGap(10)
.addComponent(btnSend))
.addGroup(groupLayout.createSequentialGroup()
.addGroup(groupLayout.createParallelGroup(Alignment.LEADING)
.addComponent(selectFrom, 0, 400, Short.MAX_VALUE)
.addGroup(groupLayout.createSequentialGroup()
.addGroup(groupLayout.createParallelGroup(Alignment.LEADING, false)
.addComponent(txtAmount, GroupLayout.DEFAULT_SIZE, 255, Short.MAX_VALUE)
.addComponent(txtFee)
.addComponent(txtData))
.addGroup(groupLayout.createParallelGroup(Alignment.LEADING)
.addGroup(groupLayout.createSequentialGroup()
.addGap(12)
.addGroup(groupLayout.createParallelGroup(Alignment.TRAILING)
.addComponent(lblSem1)
.addComponent(lblSem2)))
.addGroup(groupLayout.createSequentialGroup()
.addPreferredGap(ComponentPlacement.RELATED)
.addComponent(rdbtnText)
.addPreferredGap(ComponentPlacement.RELATED)
.addComponent(rdbtnHex))))
.addGroup(Alignment.TRAILING, groupLayout.createSequentialGroup()
.addComponent(txtTo, GroupLayout.DEFAULT_SIZE, 300, Short.MAX_VALUE)
.addGap(18)
.addComponent(btnAddressBook)))
.addGap(59))))
);
groupLayout.setVerticalGroup(
groupLayout.createParallelGroup(Alignment.LEADING)
.addGroup(groupLayout.createSequentialGroup()
.addGap(18)
.addGroup(groupLayout.createParallelGroup(Alignment.BASELINE)
.addComponent(lblFrom)
.addComponent(selectFrom, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE))
.addGap(18)
.addGroup(groupLayout.createParallelGroup(Alignment.BASELINE)
.addComponent(lblTo)
.addComponent(txtTo, GroupLayout.PREFERRED_SIZE, 25, GroupLayout.PREFERRED_SIZE)
.addComponent(btnAddressBook))
.addGap(18)
.addGroup(groupLayout.createParallelGroup(Alignment.BASELINE)
.addComponent(lblAmount)
.addComponent(txtAmount, GroupLayout.PREFERRED_SIZE, 25, GroupLayout.PREFERRED_SIZE)
.addComponent(lblSem1))
.addGap(18)
.addGroup(groupLayout.createParallelGroup(Alignment.BASELINE)
.addComponent(lblFee)
.addComponent(txtFee, GroupLayout.PREFERRED_SIZE, 25, GroupLayout.PREFERRED_SIZE)
.addComponent(lblSem2))
.addGap(18)
.addGroup(groupLayout.createParallelGroup(Alignment.BASELINE)
.addComponent(lblData)
.addComponent(txtData, GroupLayout.PREFERRED_SIZE, 25, GroupLayout.PREFERRED_SIZE)
.addComponent(rdbtnText)
.addComponent(rdbtnHex))
.addGap(18)
.addGroup(groupLayout.createParallelGroup(Alignment.BASELINE)
.addComponent(btnSend)
.addComponent(btnClear))
.addContainerGap(30, Short.MAX_VALUE))
);
setLayout(groupLayout);
// @formatter:on
refresh();
clear();
}
public String getToText() {
return txtTo.getText().trim();
}
public void setToText(byte[] address) {
txtTo.setText(Hex.encode(address));
}
public Amount getAmountText() throws ParseException {
return SwingUtil.parseAmount(txtAmount.getText().trim());
}
public void setAmountText(Amount a) {
txtAmount.setText(SwingUtil.formatAmountNoUnit(a));
}
public Amount getFeeText() throws ParseException {
return SwingUtil.parseAmount(txtFee.getText().trim());
}
public void setFeeText(Amount f) {
txtFee.setText(SwingUtil.formatAmountNoUnit(f));
}
public String getDataText() {
return txtData.getText().trim();
}
public void setDataText(String dataText) {
txtData.setText(dataText.trim());
}
@Override
public synchronized void actionPerformed(ActionEvent e) {
Action action = Action.valueOf(e.getActionCommand());
switch (action) {
case REFRESH:
refresh();
break;
case SEND:
send();
break;
case CLEAR:
clear();
break;
case SHOW_ADDRESS_BOOK:
showAddressBook();
break;
default:
throw new UnreachableException();
}
}
/**
 * Rebuilds the account drop-down from the wallet model, preserving the
 * current selection when that account is still present.
 */
protected void refresh() {
    List<WalletAccount> accounts = model.getAccounts();

    // remember which account was selected before the rebuild
    AccountItem previous = (AccountItem) selectFrom.getSelectedItem();

    // repopulate the drop-down with the latest account list
    selectFrom.removeAllItems();
    for (WalletAccount account : accounts) {
        selectFrom.addItem(new AccountItem(account));
    }

    // restore the previous selection by matching addresses
    if (previous == null) {
        return;
    }
    byte[] target = previous.account.getAddress();
    for (int i = 0; i < accounts.size(); i++) {
        if (Arrays.equals(accounts.get(i).getAddress(), target)) {
            selectFrom.setSelectedIndex(i);
            break;
        }
    }
}
/**
 * Validates the user's input and, after an explicit confirmation dialog,
 * signs and submits a TRANSFER transaction from the selected account.
 *
 * Each failed validation step shows an error dialog and aborts; declining
 * the confirmation dialog aborts silently.
 */
protected void send() {
    try {
        WalletAccount acc = getSelectedAccount();
        Amount value = getAmountText();
        Amount fee = getFeeText();
        String data = getDataText();

        // decode the 0x-prefixed hex recipient address
        byte[] to = Hex.decode0x(getToText());

        if (acc == null) {
            showErrorDialog(GuiMessages.get("SelectAccount"));
        } else if (value.lte0()) {
            // the transferred amount must be strictly positive
            showErrorDialog(GuiMessages.get("EnterValidValue"));
        } else if (fee.lt(config.minTransactionFee())) {
            showErrorDialog(GuiMessages.get("TransactionFeeTooLow"));
        } else if (sum(value, fee).gt(acc.getAvailable())) {
            // value + fee must be covered by the account's available balance
            showErrorDialog(GuiMessages.get("InsufficientFunds", SwingUtil.formatAmount(sum(value, fee))));
        } else if (to.length != Key.ADDRESS_LEN) {
            showErrorDialog(GuiMessages.get("InvalidReceivingAddress"));
        } else if (Bytes.of(data).length > config.maxTransactionDataSize(TransactionType.TRANSFER)) {
            showErrorDialog(
                    GuiMessages.get("InvalidData", config.maxTransactionDataSize(TransactionType.TRANSFER)));
        } else {
            // ask the user to confirm before signing anything
            int ret = JOptionPane.showConfirmDialog(this,
                    GuiMessages.get("TransferInfo", SwingUtil.formatAmountFull(value), Hex.encode0x(to)),
                    GuiMessages.get("ConfirmTransfer"), JOptionPane.YES_NO_OPTION);
            if (ret == JOptionPane.YES_OPTION) {
                PendingManager pendingMgr = kernel.getPendingManager();

                // the data field is interpreted as plain text or as hex,
                // depending on which radio button is selected
                byte[] rawData = rdbtnText.isSelected() ? Bytes.of(data) : Hex.decode0x(data);
                Network network = kernel.getConfig().network();
                TransactionType type = TransactionType.TRANSFER;
                byte[] from = acc.getKey().toAddress();
                long nonce = pendingMgr.getNonce(from);
                long timestamp = System.currentTimeMillis();
                Transaction tx = new Transaction(network, type, to, value, fee, nonce, timestamp, rawData);
                tx.sign(acc.getKey());

                sendTransaction(pendingMgr, tx);
            }
        }
    } catch (ParseException | CryptoException ex) {
        // malformed amount/fee text or invalid hex input
        showErrorDialog(GuiMessages.get("EnterValidValue"));
    }
}
/**
 * Resets every input field to its default value: empty recipient, zero
 * amount, the configured minimum fee, and empty data.
 */
protected void clear() {
    setToText(Bytes.EMPTY_BYTES);
    setDataText("");
    setAmountText(Amount.ZERO);
    setFeeText(config.minTransactionFee());
}
/**
 * Brings up the shared address book dialog owned by the GUI.
 */
protected void showAddressBook() {
    gui.getAddressBookDialog().setVisible(true);
}
/**
 * Returns the account currently selected in the drop-down list.
 *
 * @return the selected account, or {@code null} when nothing is selected
 */
protected WalletAccount getSelectedAccount() {
    int index = selectFrom.getSelectedIndex();
    if (index == -1) {
        return null;
    }
    return model.getAccounts().get(index);
}
/**
 * Submits the given signed transaction to the pending manager and reports
 * the outcome to the user; the form is cleared on success.
 *
 * @param pendingMgr
 *            the pending manager to submit to
 * @param tx
 *            the signed transaction
 */
protected void sendTransaction(PendingManager pendingMgr, Transaction tx) {
    PendingManager.ProcessTransactionResult outcome = pendingMgr.addTransactionSync(tx);

    if (outcome.error != null) {
        showErrorDialog(GuiMessages.get("TransactionFailed", outcome.error.toString()));
        return;
    }

    // accepted: notify the user and reset the form
    JOptionPane.showMessageDialog(
            this,
            GuiMessages.get("TransactionSent", 30),
            GuiMessages.get("SuccessDialogTitle"),
            JOptionPane.INFORMATION_MESSAGE);
    clear();
}
/**
 * Shows a modal error dialog with the given message.
 *
 * @param message
 *            the message to display
 */
protected void showErrorDialog(String message) {
    String title = GuiMessages.get("ErrorDialogTitle");
    JOptionPane.showMessageDialog(this, message, title, JOptionPane.ERROR_MESSAGE);
}
/**
 * A single entry of the account drop-down: the wallet account plus a
 * pre-rendered display string of the form
 * "0x&lt;address&gt;, [&lt;alias&gt;, ]&lt;available balance&gt;".
 */
protected static class AccountItem {
    WalletAccount account;
    String name;

    public AccountItem(WalletAccount a) {
        this.account = a;

        Optional<String> alias = a.getName();
        StringBuilder label = new StringBuilder();
        label.append(Hex.PREF).append(account.getKey().toAddressString()).append(", ");
        if (alias.isPresent()) {
            label.append(alias.get()).append(", ");
        }
        label.append(SwingUtil.formatAmount(account.getAvailable()));
        this.name = label.toString();
    }

    @Override
    public String toString() {
        return this.name;
    }
}
}<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.logging.log4j.LogManager;
import org.semux.cli.SemuxOption;
import org.semux.config.Config;
import org.semux.config.Constants;
import org.semux.config.DevnetConfig;
import org.semux.config.MainnetConfig;
import org.semux.config.TestnetConfig;
import org.semux.exception.LauncherException;
import org.semux.log.LoggerConfigurator;
import org.semux.message.CliMessages;
import org.semux.util.SystemUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public abstract class Launcher {
private static final Logger logger = LoggerFactory.getLogger(Launcher.class);
protected static final String DEVNET = "devnet";
protected static final String TESTNET = "testnet";
protected static final String MAINNET = "mainnet";
/**
* Here we make sure that all shutdown hooks will be executed in the order of
* registration. This is necessary to be manually maintained because
* ${@link Runtime#addShutdownHook(Thread)} starts shutdown hooks concurrently
* in unspecified order.
*/
private static List<Pair<String, Runnable>> shutdownHooks = Collections.synchronizedList(new ArrayList<>());
static {
Runtime.getRuntime().addShutdownHook(new Thread(Launcher::shutdownHook, "shutdown-hook"));
}
private Options options = new Options();
private String dataDir = Constants.DEFAULT_DATA_DIR;
private String network = MAINNET;
private int coinbase = 0;
private String password = <PASSWORD>;
public Launcher() {
Option dataDirOption = Option.builder()
.longOpt(SemuxOption.DATA_DIR.toString())
.desc(CliMessages.get("SpecifyDataDir"))
.hasArg(true).numberOfArgs(1).optionalArg(false).argName("path").type(String.class)
.build();
addOption(dataDirOption);
Option networkOption = Option.builder()
.longOpt(SemuxOption.NETWORK.toString()).desc(CliMessages.get("SpecifyNetwork"))
.hasArg(true).numberOfArgs(1).optionalArg(false).argName("name").type(String.class)
.build();
addOption(networkOption);
}
/**
* Creates an instance of {@link Config} based on the given `--network` option.
* <p>
* Defaults to MainNet.
*
* @return the configuration
*/
public Config getConfig() {
switch (getNetwork()) {
case TESTNET:
return new TestnetConfig(getDataDir());
case DEVNET:
return new DevnetConfig(getDataDir());
default:
return new MainnetConfig(getDataDir());
}
}
/**
* Returns the network.
*
* @return
*/
public String getNetwork() {
return network;
}
/**
* Returns the data directory.
*
* @return
*/
public String getDataDir() {
return dataDir;
}
/**
* Returns the coinbase.
*
* @return
*/
public int getCoinbase() {
return coinbase;
}
/**
* Returns the provided password if any.
*
* @return
*/
public String getPassword() {
return password;
}
/**
* Parses options from the given arguments.
*
* @param args
* @return
* @throws ParseException
*/
protected CommandLine parseOptions(String[] args) throws ParseException {
CommandLineParser parser = new DefaultParser();
CommandLine cmd = parser.parse(getOptions(), args);
if (cmd.hasOption(SemuxOption.DATA_DIR.toString())) {
setDataDir(cmd.getOptionValue(SemuxOption.DATA_DIR.toString()));
}
if (cmd.hasOption(SemuxOption.NETWORK.toString())) {
setNetwork(cmd.getOptionValue(SemuxOption.NETWORK.toString()));
}
return cmd;
}
/**
* Set up customized logger configuration.
*
* @param args
* @throws ParseException
*/
protected void setupLogger(String[] args) throws ParseException {
// parse options
parseOptions(args);
LoggerConfigurator.configure(new File(dataDir));
}
/**
* Returns all supported options.
*
* @return
*/
protected Options getOptions() {
return options;
}
/**
* Adds a supported option.
*
* @param option
*/
protected void addOption(Option option) {
options.addOption(option);
}
/**
* Sets the network.
*
* @param network
*/
protected void setNetwork(String network) {
this.network = network;
}
/**
* Sets the data directory.
*
* @param dataDir
*/
protected void setDataDir(String dataDir) {
this.dataDir = dataDir;
}
/**
* Sets the coinbase.
*
* @param coinbase
*/
protected void setCoinbase(int coinbase) {
this.coinbase = coinbase;
}
/**
* Sets the password.
*
* @param password
*/
protected void setPassword(String password) {
this.password = password;
}
/**
* Check runtime prerequisite.
*
*/
protected static void checkPrerequisite() {
switch (SystemUtil.getOsName()) {
case WINDOWS:
if (!SystemUtil.isWindowsVCRedist2012Installed()) {
throw new LauncherException(
"Microsoft Visual C++ 2012 Redistributable Package is not installed. Please visit: https://www.microsoft.com/en-us/download/details.aspx?id=30679");
}
break;
default:
}
}
/**
* Registers a shutdown hook which will be executed in the order of
* registration.
*
* @param name
* @param runnable
*/
public static synchronized void registerShutdownHook(String name, Runnable runnable) {
shutdownHooks.add(Pair.of(name, runnable));
}
/**
* Call registered shutdown hooks in the order of registration.
*
*/
private static synchronized void shutdownHook() {
// shutdown hooks
for (Pair<String, Runnable> r : shutdownHooks) {
try {
logger.info("Shutting down {}", r.getLeft());
r.getRight().run();
} catch (Exception e) {
logger.info("Failed to shutdown {}", r.getLeft(), e);
}
}
// flush log4j async loggers
LogManager.shutdown();
}
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.util;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.math.MathContext;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Unit tests for BigIntegerUtil comparison, min/max, sum and random helpers.
 */
public class BigIntegerUtilTest {

    // FIX: the logger was named after BigIntegerUtil; by convention it carries
    // the name of the class that emits the records, i.e. this test class
    private static Logger logger = LoggerFactory.getLogger(BigIntegerUtilTest.class);

    private BigInteger one = BigInteger.valueOf(1);
    private BigInteger two = BigInteger.valueOf(2);
    private BigInteger three = BigInteger.valueOf(3);

    @Test
    public void testIsZero() {
        assertTrue(BigIntegerUtil.isZero(BigInteger.ZERO));
        assertFalse(BigIntegerUtil.isZero(BigInteger.ONE));
    }

    @Test
    public void testIsEqual() {
        assertTrue(BigIntegerUtil.isEqual(two, two));
        assertFalse(BigIntegerUtil.isEqual(two, three));
    }

    @Test(expected = NullPointerException.class)
    public void testIsEqual2() {
        assertFalse(BigIntegerUtil.isEqual(two, null));
    }

    @Test
    public void testIsNotEqual() {
        assertFalse(BigIntegerUtil.isNotEqual(two, two));
        assertTrue(BigIntegerUtil.isNotEqual(two, three));
    }

    @Test(expected = NullPointerException.class)
    public void testIsNotEqual2() {
        assertTrue(BigIntegerUtil.isNotEqual(two, null));
    }

    @Test
    public void testIsLessThan() {
        assertTrue(BigIntegerUtil.isLessThan(two, three));
        assertFalse(BigIntegerUtil.isLessThan(two, two));
        assertFalse(BigIntegerUtil.isLessThan(two, one));
    }

    @Test
    public void testIsGreaterThan() {
        assertTrue(BigIntegerUtil.isGreaterThan(two, one));
        assertFalse(BigIntegerUtil.isGreaterThan(two, two));
        assertFalse(BigIntegerUtil.isGreaterThan(two, three));
    }

    @Test
    public void testIsPositive() {
        assertTrue(BigIntegerUtil.isPositive(one));
        assertFalse(BigIntegerUtil.isPositive(one.negate()));
    }

    @Test
    public void testIsNegative() {
        assertFalse(BigIntegerUtil.isNegative(one));
        assertTrue(BigIntegerUtil.isNegative(one.negate()));
    }

    @Test
    public void testMax() {
        assertEquals(two, BigIntegerUtil.max(two, one));
        assertEquals(two, BigIntegerUtil.max(one, two));
    }

    @Test
    public void testMin() {
        assertEquals(one, BigIntegerUtil.min(two, one));
        assertEquals(one, BigIntegerUtil.min(one, two));
    }

    @Test
    public void testSum() {
        assertEquals(three, BigIntegerUtil.sum(one, two));
    }

    /**
     * Test if the random function is uniformly distributed. Credits to:
     * https://github.com/dwdyer/uncommons-maths/blob/462c043ffbc8df4bd45c490e447ea1ba636b1f15/core/src/java/test/org/uncommons/maths/random/DiscreteUniformGeneratorTest.java
     */
    @Test
    public void testRandomUniformlyDistributed() {
        final int N = 10000, MAX = 100;
        BigInteger[] data = new BigInteger[N];
        BigInteger sum = BigInteger.ZERO;
        for (int i = 0; i < N; i++) {
            data[i] = BigIntegerUtil.random(BigInteger.valueOf(i)).mod(BigInteger.valueOf(MAX));
            sum = sum.add(data[i]);
        }
        BigDecimal mean = new BigDecimal(sum).divide(BigDecimal.valueOf(N), MathContext.DECIMAL128);

        BigDecimal squaredDiffs = BigDecimal.ZERO;
        for (int i = 0; i < N; i++) {
            BigDecimal diff = mean.subtract(new BigDecimal(data[i]));
            squaredDiffs = squaredDiffs.add(diff.pow(2));
        }
        BigDecimal variance = squaredDiffs.divide(BigDecimal.valueOf(N), MathContext.DECIMAL128);
        BigDecimal deviation = BigDecimal.valueOf(Math.sqrt(variance.doubleValue()));

        // a discrete uniform distribution over [0, MAX) has mean MAX/2 and
        // standard deviation MAX/sqrt(12)
        BigDecimal expectedDeviation = BigDecimal.valueOf(MAX / Math.sqrt(12));
        BigDecimal expectedMean = BigDecimal.valueOf(MAX).divide(BigDecimal.valueOf(2), MathContext.DECIMAL128);

        logger.info("Mean = {}, Expected Mean = {}", mean, expectedMean);
        logger.info("Deviation = {}, Expected Deviation = {}", deviation, expectedDeviation);

        assertThat("deviation",
                deviation.subtract(expectedDeviation).abs(),
                lessThanOrEqualTo(BigDecimal.valueOf(0.02)));
        assertThat("mean",
                mean.subtract(expectedMean).abs(),
                lessThanOrEqualTo(BigDecimal.valueOf(1)));
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.api;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.List;
import org.semux.Network;
import org.semux.config.Config;
import org.semux.core.Amount;
import org.semux.core.Block;
import org.semux.core.BlockHeader;
import org.semux.core.Blockchain;
import org.semux.core.PendingManager;
import org.semux.core.Transaction;
import org.semux.core.TransactionResult;
import org.semux.core.TransactionType;
import org.semux.core.Wallet;
import org.semux.core.state.AccountState;
import org.semux.core.state.DelegateState;
import org.semux.crypto.Key;
import org.semux.net.ChannelManager;
import org.semux.net.NodeManager;
import org.semux.util.BasicAuth;
import org.semux.util.Bytes;
import org.semux.util.MerkleUtil;
import com.fasterxml.jackson.databind.ObjectMapper;
public abstract class ApiHandlerTestBase {
// mocked API server under test
protected SemuxApiMock api;
// node configuration; supplies the API endpoint and credentials used below
protected Config config;
protected Wallet wallet;
protected Blockchain chain;
protected AccountState accountState;
protected DelegateState delegateState;
protected PendingManager pendingMgr;
protected NodeManager nodeMgr;
protected ChannelManager channelMgr;
/**
 * Sends an authenticated GET request to the API server and deserializes the
 * JSON response into the given type.
 *
 * @param uri
 *            request path, starting with '/'
 * @param clazz
 *            response type to deserialize into
 * @return the deserialized response
 * @throws IOException
 *             when the connection or deserialization fails
 */
protected <T extends ApiHandlerResponse> T request(String uri, Class<T> clazz) throws IOException {
    String endpoint = "http://" + config.apiListenIp() + ":" + config.apiListenPort() + uri;
    HttpURLConnection con = (HttpURLConnection) new URL(endpoint).openConnection();
    String auth = BasicAuth.generateAuth(config.apiUsername(), config.apiPassword());
    con.setRequestProperty("Authorization", auth);
    return new ObjectMapper().readValue(con.getInputStream(), clazz);
}
/**
 * Sends an authenticated POST request to the API server and deserializes the
 * JSON response into the given type.
 *
 * @param uri
 *            request path, starting with '/'
 * @param body
 *            request body to write
 * @param clazz
 *            response type to deserialize into
 * @return the deserialized response
 * @throws IOException
 *             when the connection, write, or deserialization fails
 */
protected <T extends ApiHandlerResponse> T postRequest(String uri, String body, Class<T> clazz)
        throws IOException {
    URL u = new URL("http://" + config.apiListenIp() + ":" + config.apiListenPort() + uri);
    HttpURLConnection con = (HttpURLConnection) u.openConnection();
    con.setRequestMethod("POST");
    con.setRequestProperty("Authorization", BasicAuth.generateAuth(config.apiUsername(), config.apiPassword()));
    con.setDoOutput(true);

    // write body; try-with-resources closes the writer and stream even when
    // the write fails (the previous version leaked them on exception)
    try (OutputStream os = con.getOutputStream();
            BufferedWriter bufferedWriter = new BufferedWriter(new OutputStreamWriter(os, "UTF-8"))) {
        bufferedWriter.write(body);
        bufferedWriter.flush();
    }

    return new ObjectMapper().readValue(con.getInputStream(), clazz);
}
/**
 * Builds a block on top of the current chain head, containing the given
 * transactions and their results, with a freshly generated coinbase key.
 */
protected Block createBlock(Blockchain chain, List<Transaction> transactions, List<TransactionResult> results) {
    Key coinbaseKey = new Key();

    BlockHeader header = new BlockHeader(
            chain.getLatestBlockNumber() + 1, // number
            coinbaseKey.toAddress(), // coinbase
            chain.getLatestBlockHash(), // prevHash
            System.currentTimeMillis(), // timestamp
            MerkleUtil.computeTransactionsRoot(transactions), // transactionsRoot
            MerkleUtil.computeResultsRoot(results), // resultsRoot
            Bytes.EMPTY_HASH, // stateRoot
            new byte[0]); // data

    return new Block(header, transactions, results);
}
/**
 * Creates a zero-value transfer between two freshly generated keys.
 */
protected Transaction createTransaction() {
    Key sender = new Key();
    Key recipient = new Key();
    return createTransaction(sender, recipient, Amount.ZERO);
}
/**
 * Creates and signs a TRANSFER transaction with nonce 1, zero fee, empty
 * data, and the current time.
 *
 * @param from
 *            the signing key
 * @param to
 *            the recipient key
 * @param value
 *            the transferred amount
 */
protected Transaction createTransaction(Key from, Key to, Amount value) {
    Transaction tx = new Transaction(
            config.network(),
            TransactionType.TRANSFER,
            to.toAddress(),
            value,
            Amount.ZERO, // fee
            1, // nonce
            System.currentTimeMillis(), // timestamp
            new byte[0]); // data
    return tx.sign(from);
}
}<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.util;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.charset.Charset;
import org.junit.After;
import org.junit.Test;
/**
 * Unit tests for IOUtil's file read/write/copy helpers.
 */
public class IOUtilTest {

    private static final Charset CHARSET = UTF_8;

    // scratch files in the working directory, deleted after every test
    private File f1 = new File("test1");
    private File f2 = new File("test2");

    @Test
    public void testWriteToFileExists() throws IOException {
        IOUtil.writeToFile(Bytes.of("123"), f1);
        assertEquals("123", Bytes.toString(IOUtil.readFile(f1)));

        // a second write must replace the content, not append to it
        IOUtil.writeToFile(Bytes.of("456"), f1);
        assertEquals("456", IOUtil.readFileAsString(f1));
    }

    @Test
    public void testCopyFile() throws IOException {
        IOUtil.writeToFile(Bytes.of("123"), f1);
        f2.createNewFile();

        // with overwrite == false, the existing (empty) destination is kept
        IOUtil.copyFile(f1, f2, false);
        assertEquals("", IOUtil.readFileAsString(f2));

        // with overwrite == true, the destination receives the source content
        IOUtil.copyFile(f1, f2, true);
        assertEquals("123", IOUtil.readFileAsString(f2));
    }

    @Test
    public void testReadFileAsLines() throws IOException {
        // a non-existent file yields no lines
        assertTrue(IOUtil.readFileAsLines(f1, CHARSET).isEmpty());

        IOUtil.writeToFile(Bytes.of("123\n456\n"), f1);
        assertThat(IOUtil.readFileAsLines(f1, CHARSET), contains("123", "456"));

        // a trailing blank line is preserved as an empty string
        IOUtil.writeToFile(Bytes.of("123\n456\n\n"), f1);
        assertThat(IOUtil.readFileAsLines(f1, CHARSET), contains("123", "456", ""));
    }

    @Test
    public void testReadFile() throws IOException {
        // a non-existent file reads as zero bytes
        assertThat(IOUtil.readFile(f1), equalTo(new byte[0]));

        IOUtil.writeToFile(Bytes.of("abc"), f1);
        assertThat(IOUtil.readFile(f1), equalTo(new byte[] { 'a', 'b', 'c' }));
    }

    @Test
    public void testReadStreamAsString() throws IOException {
        IOUtil.writeToFile("abc", f1);
        FileInputStream in = new FileInputStream(f1);
        assertThat(IOUtil.readStreamAsString(in), equalTo("abc"));
        in.close();
    }

    @After
    public void deleteFiles() {
        f1.delete();
        f2.delete();
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.gui;
import static org.junit.Assert.assertEquals;
import static org.semux.core.Amount.Unit.NANO_SEM;
import java.math.BigDecimal;
import java.text.ParseException;
import java.util.Locale;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.semux.core.Amount;
/**
 * Unit tests for SwingUtil number/amount/percentage formatting and parsing.
 *
 * Formatting is locale- and unit-sensitive, so every test starts and ends
 * with a reset to en-US, unit "SEM", and 3 fraction digits.
 */
public class SwingUtilTest {

    @Rule
    public ExpectedException expectedException = ExpectedException.none();

    @Before
    public void setUp() {
        reset();
    }

    @After
    public void tearDown() {
        reset();
    }

    // restore the locale and SwingUtil defaults so tests don't leak state
    private void reset() {
        Locale.setDefault(new Locale("en", "US"));
        SwingUtil.setDefaultUnit("SEM");
        SwingUtil.setDefaultFractionDigits(3);
    }

    @Test
    public void testFormatNumber() {
        BigDecimal x = new BigDecimal("12345678.1234");
        assertEquals("12,345,678", SwingUtil.formatNumber(x, 0));
        assertEquals("12,345,678.12", SwingUtil.formatNumber(x, 2));
    }

    @Test
    public void testParseNumber() throws ParseException {
        assertEquals(new BigDecimal("12345678.12"), SwingUtil.parseNumber("12,345,678.12"));
    }

    @Test
    public void testParseNumberEmpty() throws ParseException {
        // an empty string is rejected rather than parsed as zero
        expectedException.expect(ParseException.class);
        SwingUtil.parseNumber("");
    }

    @Test
    public void testParseTimestampEmpty() throws ParseException {
        expectedException.expect(ParseException.class);
        SwingUtil.parseTimestamp("");
    }

    @Test
    public void testFormatAndEncodeValue() throws ParseException {
        Amount x = NANO_SEM.of(1_234_456_789_000L);
        assertEquals("1,234.456 SEM", SwingUtil.formatAmount(x));
        // parsing accepts a bare number as well as explicit SEM/mSEM/μSEM units
        assertEquals(x, SwingUtil.parseAmount("1,234.456789"));
        assertEquals(x, SwingUtil.parseAmount("1,234.456789 SEM"));
        assertEquals(x, SwingUtil.parseAmount("1,234,456.789 mSEM"));
        assertEquals(x, SwingUtil.parseAmount("1,234,456,789 μSEM"));
    }

    @Test
    public void testFormatValueWithCustomUnit() {
        Amount x = NANO_SEM.of(1_234_456_789_123L);
        assertEquals("1,234.456 SEM", SwingUtil.formatAmount(x));
        SwingUtil.setDefaultUnit("mSEM");
        assertEquals("1,234,456.789 mSEM", SwingUtil.formatAmount(x));
        SwingUtil.setDefaultUnit("μSEM");
        assertEquals("1,234,456,789.123 μSEM", SwingUtil.formatAmount(x));
    }

    @Test
    public void testFormatValueWithCustomFractionDigits() {
        Amount x = NANO_SEM.of(1_234_456_789_123L);
        SwingUtil.setDefaultUnit("SEM");
        SwingUtil.setDefaultFractionDigits(9);
        assertEquals("1,234.456789123 SEM", SwingUtil.formatAmount(x));
    }

    @Test
    public void testFormatValueFull() {
        Amount x = NANO_SEM.of(1_234_456_789_123L);
        // the "full" variant prints all fraction digits even when the
        // configured default is 0
        SwingUtil.setDefaultFractionDigits(0);
        assertEquals("1,234.456789123 SEM", SwingUtil.formatAmountFull(x));
    }

    @Test
    public void testFormatAndEncodePercentage() throws ParseException {
        double x = 12.3456;
        assertEquals("12.3 %", SwingUtil.formatPercentage(x));
        assertEquals(12.3, SwingUtil.parsePercentage("12.3 %"), 10e-9);
    }

    @Test
    public void testNumberComparator() {
        // String 1 < String 2
        long compareResult1 = SwingUtil.NUMBER_COMPARATOR.compare("1.0000", "2.25");
        assertEquals(-1L, compareResult1, 0L);
        // String 1 > String 2
        long compareResult2 = SwingUtil.NUMBER_COMPARATOR.compare("2.25", "1.0000");
        assertEquals(1L, compareResult2, 0L);
        // String 1 == String 2
        long compareResult3 = SwingUtil.NUMBER_COMPARATOR.compare("1.0000", "1.0000");
        assertEquals(0L, compareResult3, 0L);
    }

    @Test(expected = NumberFormatException.class)
    public void testNumberComparatorExceptionPartiallyWrongInput() {
        SwingUtil.NUMBER_COMPARATOR.compare("1.0000", "2f.25test");
    }

    @Test(expected = NumberFormatException.class)
    public void testNumberComparatorExceptionTotallyWrongInput() {
        SwingUtil.NUMBER_COMPARATOR.compare("1.0000", "worstWord");
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.db;
import org.semux.config.Config;
/**
 * A single database migration step.
 */
public interface Migration {

    /**
     * Runs this migration against the databases provided by the given factory.
     *
     * @param config
     *            the active node configuration
     * @param dbFactory
     *            factory providing the databases to migrate
     */
    void migrate(Config config, DatabaseFactory dbFactory);
}
<file_sep>Semux API reference is now hosted at https://www.semux.org/assets/semux/api/v1.0.1/<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.windows;
import static org.awaitility.Awaitility.await;
import static org.junit.Assume.assumeTrue;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import org.apache.commons.io.FileUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.semux.util.SystemUtil;
import org.semux.util.SystemUtil.OsName;
/**
 * Integration test for the launch4j Windows wrapper (semux.exe): starts the
 * wrapper with debug logging and verifies that the wrapped process keeps
 * running.
 */
@Category(org.semux.windows.WindowsIntegrationTest.class)
public class Launch4jWrapperIT {

    private Process l4jWrapper;

    private Path l4jLogPath;

    @Before
    public void setUp() throws IOException {
        // FIX: guard the whole fixture, not just the test method. The original
        // only assumed Windows inside the @Test, so on other platforms this
        // @Before already failed with an IOException trying to start semux.exe
        // (reported as an error instead of a skipped test).
        assumeTrue(SystemUtil.getOsName() == OsName.WINDOWS);

        Path prefix = Paths.get(System.getProperty("user.dir"), "target");

        l4jLogPath = Paths.get(prefix.toString(), "launch4j.log");
        FileUtils.deleteQuietly(l4jLogPath.toFile());

        Path semuxExePath = Paths.get(prefix.toString(), "semux.exe");
        ProcessBuilder processBuilder = new ProcessBuilder(semuxExePath.toString(), "").directory(prefix.toFile());
        // launch4j writes launch4j.log when started with this env variable
        processBuilder.environment().put("Launch4j", "debug");
        l4jWrapper = processBuilder.start();
    }

    @After
    public void tearDown() {
        // l4jWrapper is null when the setUp assumption failed
        if (l4jWrapper != null) {
            l4jWrapper.destroyForcibly();
        }
    }

    @Test
    public void testLaunch4jWrapper() {
        await().until(() -> l4jLogPath.toFile().exists());
        // exit code 259 is what the wrapper logs while the child is running
        await().until(() -> Files.lines(l4jLogPath).anyMatch(str -> str.matches("Exit code:[\\t\\s]*259")));
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.core.state;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.semux.core.Amount.ZERO;
import static org.semux.core.Amount.Unit.NANO_SEM;
import java.util.Map;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.semux.config.Constants;
import org.semux.config.DevnetConfig;
import org.semux.core.Blockchain;
import org.semux.core.BlockchainImpl;
import org.semux.core.Genesis.Premine;
import org.semux.rules.TemporaryDatabaseRule;
import org.semux.util.ByteArray;
import org.semux.util.Bytes;
/**
 * Unit tests for the account state: genesis premines, serialization, and the
 * track/rollback behavior of available/locked balances and nonces.
 */
public class AccountStateTest {

    private Blockchain chain;
    private AccountState state;

    @Rule
    public TemporaryDatabaseRule temporaryDBFactory = new TemporaryDatabaseRule();

    @Before
    public void setUp() {
        // a fresh devnet blockchain (and thus account state) for every test
        chain = new BlockchainImpl(new DevnetConfig(Constants.DEFAULT_DATA_DIR), temporaryDBFactory);
        state = chain.getAccountState();
    }

    @Test
    public void testAtGenesis() {
        // every premined address carries its premine amount at genesis
        Map<ByteArray, Premine> premine = chain.getGenesis().getPremines();

        for (ByteArray k : premine.keySet()) {
            Account acc = state.getAccount(k.getData());
            assertEquals(premine.get(k).getAmount(), acc.getAvailable());
        }
    }

    @Test
    public void testAccount() {
        // round-trip an account through toBytes()/fromBytes()
        byte[] address = Bytes.random(20);
        Account acc = state.getAccount(address);
        acc.setAvailable(NANO_SEM.of(1));
        acc.setLocked(NANO_SEM.of(2));
        acc.setNonce(3);

        Account acc2 = Account.fromBytes(address, acc.toBytes());
        assertEquals(NANO_SEM.of(1), acc2.getAvailable());
        assertEquals(NANO_SEM.of(2), acc2.getLocked());
        assertEquals(3L, acc2.getNonce());
    }

    @Test
    public void testNonExists() {
        // an unknown address yields a zeroed account, not null
        byte[] address = Bytes.random(20);
        Account acc = state.getAccount(address);

        assertArrayEquals(address, acc.getAddress());
        assertEquals(ZERO, acc.getAvailable());
        assertEquals(ZERO, acc.getLocked());
        assertEquals(0, acc.getNonce());
    }

    @Test
    public void testAvailable() {
        byte[] address = Bytes.random(20);
        assertEquals(ZERO, state.getAccount(address).getAvailable());
        state.adjustAvailable(address, NANO_SEM.of(20));
        assertEquals(NANO_SEM.of(20), state.getAccount(address).getAvailable());

        // a tracked child state sees the parent's adjustment
        AccountState state2 = state.track();
        assertEquals(NANO_SEM.of(20), state2.getAccount(address).getAvailable());

        // rolling back the parent is visible through the tracked child
        state.rollback();
        assertEquals(ZERO, state2.getAccount(address).getAvailable());
    }

    @Test
    public void testLocked() {
        byte[] address = Bytes.random(20);
        assertEquals(ZERO, state.getAccount(address).getLocked());
        state.adjustLocked(address, NANO_SEM.of(20));
        assertEquals(NANO_SEM.of(20), state.getAccount(address).getLocked());

        // same track/rollback semantics as for the available balance
        AccountState state2 = state.track();
        assertEquals(NANO_SEM.of(20), state2.getAccount(address).getLocked());

        state.rollback();
        assertEquals(ZERO, state2.getAccount(address).getLocked());
    }

    @Test
    public void testNonce() {
        byte[] address = Bytes.random(20);
        assertEquals(0, state.getAccount(address).getNonce());
        state.increaseNonce(address);
        assertEquals(1, state.getAccount(address).getNonce());

        AccountState state2 = state.track();
        assertEquals(1, state2.getAccount(address).getNonce());

        state.rollback();
        assertEquals(0, state2.getAccount(address).getNonce());
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.gui.dialog;
import org.semux.KernelMock;
import org.semux.core.Transaction;
import org.semux.gui.BaseTestApplication;
import org.semux.gui.SemuxGui;
import org.semux.gui.model.WalletModel;
/**
 * Minimal test application that builds a SemuxGui and immediately opens a
 * TransactionDialog for the given transaction, for GUI testing.
 */
public class TransactionDialogTestApplication extends BaseTestApplication {

    private static final long serialVersionUID = 1L;

    SemuxGui gui;
    TransactionDialog transactionDialog;

    TransactionDialogTestApplication(WalletModel walletModel, Transaction tx, KernelMock kernelMock) {
        super();
        gui = new SemuxGui(walletModel, kernelMock);
        transactionDialog = new TransactionDialog(this, tx);
        // show the dialog right away so tests can interact with it
        transactionDialog.setVisible(true);
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.bench;
import static org.semux.core.Amount.Unit.MILLI_SEM;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import org.semux.config.Constants;
import org.semux.config.DevnetConfig;
import org.semux.util.ApiClient;
import org.semux.util.Bytes;
import org.semux.util.ConsoleUtil;
public class SemuxPerformance {
// API server endpoint to send transfers to
private static InetSocketAddress server = new InetSocketAddress("127.0.0.1", 5171);
// API credentials and source wallet address; populated interactively in main()
private static String username = "";
private static String password = "";
private static String address = "";
// target throughput in transactions per second
private static int tps = 500;
public static void testTransfer(int n) throws IOException, InterruptedException {
DevnetConfig config = new DevnetConfig(Constants.DEFAULT_DATA_DIR);
long t1 = System.currentTimeMillis();
for (int i = 1; i <= n; i++) {
Map<String, Object> params = new HashMap<>();
params.put("from", address);
params.put("to", address);
params.put("value", MILLI_SEM.of(1));
params.put("fee", config.minTransactionFee());
params.put("data", Bytes.EMPTY_BYTES);
params.put("password", <PASSWORD>);
ApiClient api = new ApiClient(server, username, password);
String response = api.request("transfer", params);
if (!response.contains("\"success\":true")) {
System.out.println(response);
return;
}
if (i % tps == 0) {
System.out.println(new SimpleDateFormat("[HH:mm:ss]").format(new Date()) + " " + i);
long t2 = System.currentTimeMillis();
Thread.sleep(Math.max(0, 1000 - (t2 - t1)));
t1 = t2;
}
}
}
/**
 * Interactive entry point: reads the wallet address and API credentials,
 * then repeatedly sends batches of transfers until a non-positive count is
 * entered.
 */
public static void main(String[] args) throws Exception {
    address = ConsoleUtil.readPassword("Please enter your wallet address: ");
    username = ConsoleUtil.readPassword("Please enter your API username: ");
    password = ConsoleUtil.readPassword("Please enter your API password: ");

    while (true) {
        // strip any non-digit characters from the entered count
        String line = ConsoleUtil.readLine("# transactions to send: ");
        int n = Integer.parseInt(line.replaceAll("[^\\d]", ""));
        if (n <= 0) {
            break;
        }
        testTransfer(n);
    }
}
}<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.net.msg;
import org.junit.Test;
/**
 * Tests for the network message factory.
 */
public class MessageFactoryTest {

    /**
     * Creating a message with an unknown message code must fail with a
     * MessageException.
     */
    @Test(expected = MessageException.class)
    public void testNonExist() throws MessageException {
        new MessageFactory().create((byte) 0xff, new byte[1]);
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.gui;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Frame;
import java.awt.Graphics;
import java.time.Duration;
import javax.swing.BorderFactory;
import javax.swing.Box;
import javax.swing.BoxLayout;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JProgressBar;
import javax.swing.SwingConstants;
import org.apache.commons.lang3.time.DurationFormatUtils;
import org.semux.core.SyncManager;
import org.semux.message.GuiMessages;
/**
* StatusBar represents a UI component displaying current wallet status like
* sync progress and number of peers.
*/
/**
 * StatusBar represents a UI component displaying current wallet status like
 * sync progress and number of peers.
 */
public class StatusBar extends JPanel {

    private static final long serialVersionUID = 2676757102891632156L;

    // label showing the current peer count, updated via setPeersNumber()
    private final JLabel peers = new JLabel();
    // progress bar showing sync progress in 1/100 of a percent (max = 10000)
    private final JProgressBar syncProgressBar = new JProgressBar();

    /**
     * Creates the status bar and lays out its components.
     *
     * @param parent
     *            parent frame, used only to size this bar to the frame's width
     */
    public StatusBar(Frame parent) {
        super();
        init(parent);
    }

    /**
     * Initialize UI components.
     *
     * @param parent
     *            parent frame.
     */
    private void init(Frame parent) {
        // thin top border separates the bar from the content above it
        setBorder(BorderFactory.createMatteBorder(1, 0, 0, 0, Color.LIGHT_GRAY));
        setPreferredSize(new Dimension(parent.getWidth(), getFontMetrics(getFont()).getHeight() + 10));
        setLayout(new BoxLayout(this, BoxLayout.X_AXIS));
        addGap(10);
        // add number of peers
        JLabel peersLabel = new JLabel(GuiMessages.get("Peers") + ":");
        peersLabel.setHorizontalAlignment(SwingConstants.LEFT);
        add(peersLabel);
        addGap(5);
        peers.setName("peers");
        peers.setHorizontalAlignment(SwingConstants.LEFT);
        add(peers);
        addSeparator();
        // add progress bar
        JLabel syncProgressLabel = new JLabel(GuiMessages.get("SyncProgress") + ":");
        syncProgressLabel.setHorizontalAlignment(SwingConstants.LEFT);
        add(syncProgressLabel);
        addGap(5);
        JPanel progressBarPanel = new JPanel();
        progressBarPanel.setLayout(new BoxLayout(progressBarPanel, BoxLayout.X_AXIS));
        progressBarPanel.setMaximumSize(new Dimension(500, getFontMetrics(getFont()).getHeight()));
        // 10000 steps gives 0.01% resolution for the percentage display
        syncProgressBar.setMaximum(10000);
        syncProgressBar.setAlignmentY(CENTER_ALIGNMENT);
        syncProgressBar.setStringPainted(true);
        progressBarPanel.add(syncProgressBar);
        add(progressBarPanel);
        addSeparator();
    }

    /**
     * Update progress bar and estimated time.
     *
     * @param progress
     *            current progress.
     */
    public void setProgress(SyncManager.Progress progress) {
        // scale current/target into the bar's 0..10000 range
        syncProgressBar.setValue(
                (int) Math.round((double) progress.getCurrentHeight() / (double) progress.getTargetHeight() * 10000d));
        Duration estimation = progress.getSyncEstimation();
        if (estimation != null && estimation.getSeconds() > 0L) {
            // show "percentage (remaining time)"; long estimations are clamped
            syncProgressBar.setString(String.format(
                    "%s (%s)",
                    SyncProgressFormatter.format(progress),
                    // TODO: localize estimation
                    estimation.toDays() >= 30 ? ">= 1 month"
                            : DurationFormatUtils.formatDurationWords(
                                    estimation.toMillis(), true, true)));
        } else {
            syncProgressBar.setString(SyncProgressFormatter.format(progress));
        }
    }

    /**
     * Update the number of peers.
     *
     * @param peersNumber
     *            current number of peers.
     */
    public void setPeersNumber(int peersNumber) {
        peers.setText(SwingUtil.formatNumber(peersNumber));
    }

    // inserts a fixed-width horizontal spacer
    private void addGap(int width) {
        add(Box.createRigidArea(new Dimension(width, 0)));
    }

    // inserts a thin vertical separator with padding on both sides
    private void addSeparator() {
        SeparatorPanel separator = new SeparatorPanel();
        separator.setMaximumSize(new Dimension(1, getFontMetrics(getFont()).getHeight()));
        addGap(20);
        add(separator);
        addGap(20);
    }

    /**
     * Syncing progress formatter.
     */
    protected static class SyncProgressFormatter {

        private SyncProgressFormatter() {
        }

        /**
         * Formats a sync progress as a display string: "finished" when current
         * equals target, a percentage while syncing, and "stopped" when the
         * progress is null or the target height is unknown.
         *
         * @param progress
         *            current progress, may be null
         * @return the localized status text
         */
        public static String format(SyncManager.Progress progress) {
            if (progress == null) {
                return GuiMessages.get("SyncStopped");
            } else if (progress.getCurrentHeight() > 0 && progress.getCurrentHeight() == progress.getTargetHeight()) {
                return GuiMessages.get("SyncFinished");
            } else if (progress.getTargetHeight() > 0) {
                return SwingUtil.formatPercentage(
                        (double) progress.getCurrentHeight() / (double) progress.getTargetHeight() * 100d, 2);
            } else {
                return GuiMessages.get("SyncStopped");
            }
        }
    }

    /**
     * A one-pixel-wide vertical separator drawn as a gray line with a white
     * highlight next to it.
     */
    private static final class SeparatorPanel extends JPanel {

        private static final long serialVersionUID = -5802537037684892071L;

        private final Color leftColor;
        private final Color rightColor;

        private SeparatorPanel() {
            this.leftColor = Color.GRAY;
            this.rightColor = Color.WHITE;
            setOpaque(false);
        }

        @Override
        protected void paintComponent(Graphics g) {
            // two adjacent vertical lines: gray at x=0, white at x=1
            g.setColor(leftColor);
            g.drawLine(0, 0, 0, getHeight());
            g.setColor(rightColor);
            g.drawLine(1, 0, 1, getHeight());
        }
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.util;
import java.util.ArrayList;
import java.util.List;
import org.semux.core.Transaction;
import org.semux.core.TransactionResult;
import org.semux.crypto.Hash;
public class MerkleUtil {

    /**
     * Computes the Merkle root over the hashes of the given transactions.
     *
     * @param txs
     *            transactions
     * @return the Merkle root hash
     */
    public static byte[] computeTransactionsRoot(List<Transaction> txs) {
        List<byte[]> leaves = new ArrayList<>(txs.size());
        for (Transaction transaction : txs) {
            leaves.add(transaction.getHash());
        }
        return new MerkleTree(leaves).getRootHash();
    }

    /**
     * Computes the Merkle root over the 256-bit hashes of the serialized
     * transaction results.
     *
     * @param results
     *            transaction results
     * @return the Merkle root hash
     */
    public static byte[] computeResultsRoot(List<TransactionResult> results) {
        List<byte[]> leaves = new ArrayList<>(results.size());
        for (TransactionResult result : results) {
            leaves.add(Hash.h256(result.toBytes()));
        }
        return new MerkleTree(leaves).getRootHash();
    }

    private MerkleUtil() {
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.db;
import java.nio.file.Path;
import java.util.List;
import java.util.Map.Entry;
import org.apache.commons.lang3.tuple.Pair;
import org.semux.util.ClosableIterator;
/**
* Key-value database.
*
*/
/**
 * Key-value database.
 *
 */
public interface Database {

    /**
     * Returns the value that is mapped to the specified key.
     *
     * @param key
     *            the key to look up
     * @return the mapped value; presumably null when the key is absent — confirm
     *         with the implementations
     */
    byte[] get(byte[] key);

    /**
     * Associates a value to the specified key.
     *
     * @param key
     *            the key
     * @param value
     *            can not be null
     */
    void put(byte[] key, byte[] value);

    /**
     * Deletes the specified key value pair if present.
     *
     * @param key
     *            the key to delete
     */
    void delete(byte[] key);

    /**
     * Updates a list of key value pairs.
     *
     * @param pairs
     *            key value pairs; pair with null value, will be deleted
     */
    void updateBatch(List<Pair<byte[], byte[]>> pairs);

    /**
     * Returns all the keys.<br>
     * <br>
     * NOTE: be sure to close the iterator after iteration.
     *
     * @return an iterator over all entries; the caller must close it
     */
    ClosableIterator<Entry<byte[], byte[]>> iterator();

    /**
     * Returns all the keys which has the given prefix.<br>
     * <br>
     * NOTE: be sure to close the iterator after iteration.
     *
     * @param prefix
     *            the key prefix to match
     * @return an iterator over the matching entries; the caller must close it
     */
    ClosableIterator<Entry<byte[], byte[]>> iterator(byte[] prefix);

    /**
     * Closes the database.
     */
    void close();

    /**
     * Destroys this DB.
     *
     */
    void destroy();

    /**
     * Returns the data directory of this database.
     *
     * @return the path of the data directory
     */
    Path getDataDir();
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.gui;
import java.awt.Dimension;
import org.semux.gui.model.WalletModel;
public class SplashScreenTestApplication extends BaseTestApplication {
private static final long serialVersionUID = 7961392121592436000L;
protected SplashScreen splashScreen;
protected WalletModel walletModel;
/**
 * Builds the test application: sizes the frame and creates a splash screen
 * backed by a fresh wallet model.
 */
SplashScreenTestApplication() {
    super();
    setMinimumSize(new Dimension(960, 600));
    walletModel = new WalletModel();
    splashScreen = new SplashScreen(walletModel);
}
}<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.util;
import java.util.Base64;
import org.apache.commons.lang3.tuple.Pair;
/**
* Basic authentication helper.
*
*/
public class BasicAuth {

    /**
     * Parses the username and password from the AUTHORIZATION header.
     *
     * @param auth
     *            the raw header value, may be null
     * @return a pair of username and password if success, otherwise null
     */
    public static Pair<String, String> parseAuth(String auth) {
        // only the "Basic <base64>" scheme is supported
        if (auth == null || !auth.startsWith("Basic ")) {
            return null;
        }
        try {
            String decoded = Bytes.toString(Base64.getDecoder().decode(auth.substring(6)));
            int sep = decoded.indexOf(':');
            // credentials are "username:password"; reject if no separator
            return sep == -1 ? null : Pair.of(decoded.substring(0, sep), decoded.substring(sep + 1));
        } catch (IllegalArgumentException e) {
            // invalid base64 string
            return null;
        }
    }

    /**
     * Generates the AUTHORIZATION header.
     *
     * @param username
     *            the username
     * @param password
     *            the password
     * @return the "Basic ..." header value
     */
    public static String generateAuth(String username, String password) {
        return "Basic " + Base64.getEncoder().encodeToString(Bytes.of(username + ":" + password));
    }

    private BasicAuth() {
    }
}
<file_sep>Hi there,
Please note that this is an issue tracker reserved for bug reports and feature requests.
For general questions please use the Discord channel here: https://discord.gg/qQVckKZ
#### System information
Semux version: `e.g., v1.0.0-rc.1`
OS & Version: `e.g., Windows 7, Ubuntu 16.04, macOS 10.12.6`
Commit hash : `git rev-parse HEAD` (optional)
#### Expected behaviour
Description
#### Actual behaviour
Description, screenshot allowed
#### Steps to reproduce the behaviour
1. Step one
2. Step two
#### Backtrace
````
[backtrace]
````
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.gui;
/**
 * Listener interface for receiving {@link SemuxEvent} notifications.
 */
public interface SemuxEventListener {

    /**
     * Invoked when a Semux event occurs.
     *
     * @param event
     *            the event that occurred
     */
    void onSemuxEvent(SemuxEvent event);
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.bench;
import java.io.IOException;
import org.semux.config.Config;
import org.semux.config.Constants;
import org.semux.config.MainnetConfig;
import org.semux.core.Block;
import org.semux.core.Blockchain;
import org.semux.core.BlockchainImpl;
import org.semux.db.LeveldbDatabase.LevelDbFactory;
import org.semux.net.msg.consensus.BlockMessage;
import org.xerial.snappy.Snappy;
/**
 * Benchmark that measures the Snappy compression ratio and timing over the
 * blocks of the local mainnet database, once for all blocks and once for only
 * the blocks that contain transactions.
 */
public class CompressPerformance {

    enum Mode {
        ALL_BLOCKS, BLOCKS_WITH_TX
    }

    public static void main(String[] args) throws IOException {
        Config config = new MainnetConfig(Constants.DEFAULT_DATA_DIR);
        LevelDbFactory dbFactory = new LevelDbFactory(config.databaseDir());
        Blockchain chain = new BlockchainImpl(config, dbFactory);
        for (Mode mode : Mode.values()) {
            int blocks = 0;
            int transactions = 0;
            int size = 0;
            int sizeCompressed = 0;
            long time = 0;
            // hoist the loop-invariant chain height out of the loop condition
            long latest = chain.getLatestBlockNumber();
            for (long i = 1; i <= latest; i++) {
                Block b = chain.getBlock(i);
                BlockMessage m = new BlockMessage(b);
                if (mode == Mode.BLOCKS_WITH_TX && b.getTransactions().isEmpty()) {
                    continue;
                }
                blocks++;
                transactions += b.getTransactions().size();
                size += m.getEncoded().length;
                // time only the compression call itself
                long t1 = System.nanoTime();
                sizeCompressed += Snappy.compress(m.getEncoded()).length;
                long t2 = System.nanoTime();
                time += t2 - t1;
            }
            System.out.println("======================================");
            System.out.println(mode);
            System.out.println("======================================");
            System.out.println("# of blocks : " + blocks);
            System.out.println("# of transactions: " + transactions);
            System.out.println("Raw size : " + size + " bytes");
            System.out.println("Compressed size : " + sizeCompressed + " bytes");
            // FIX: guard the ratio/average divisions — with an empty chain (or a
            // chain with no transaction-carrying blocks in BLOCKS_WITH_TX mode),
            // `time / blocks` used to throw ArithmeticException
            if (blocks > 0) {
                System.out.println("Ratio : " + (100.0 * sizeCompressed / size) + " %");
                System.out.println("Total time used : " + time + " ns");
                System.out.println("Average time used: " + time / blocks + " ns");
            } else {
                System.out.println("No blocks matched this mode.");
            }
        }
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.consensus;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.util.ArrayList;
import java.util.List;
import org.junit.Test;
import org.semux.core.Block;
import org.semux.core.BlockHeader;
import org.semux.core.Transaction;
import org.semux.core.TransactionResult;
import org.semux.crypto.Hash;
import org.semux.crypto.Key;
import org.semux.util.ByteArray;
import org.semux.util.Bytes;
import org.semux.util.MerkleUtil;
public class VoteTest {

    /**
     * Round-trips a COMMIT vote through toBytes/fromBytes and checks that all
     * fields survive serialization; also checks that the vote only validates
     * after being signed.
     */
    @Test
    public void testVote() {
        // extreme values exercise the full range of the encoding
        long height = Long.MAX_VALUE;
        int view = Integer.MAX_VALUE;
        Vote vote = Vote.newApprove(VoteType.COMMIT, height, view, Bytes.EMPTY_HASH);
        // an unsigned vote must not validate
        assertFalse(vote.validate());
        vote.sign(new Key());
        assertTrue(vote.validate());
        Vote vote2 = Vote.fromBytes(vote.toBytes());
        assertEquals(VoteType.COMMIT, vote2.getType());
        assertEquals(height, vote2.getHeight());
        assertEquals(view, vote2.getView());
        assertArrayEquals(Bytes.EMPTY_HASH, vote2.getBlockHash());
    }

    /**
     * A vote must be invalid before signing and valid after signing.
     */
    @Test
    public void testValidate() {
        VoteType type = VoteType.COMMIT;
        long height = 1;
        int view = 0;
        byte[] blockHash = Bytes.EMPTY_HASH;
        Vote v = new Vote(type, false, height, view, blockHash);
        assertFalse(v.validate());
        v.sign(new Key());
        assertTrue(v.validate());
    }

    /**
     * Builds a block carrying two PRECOMMIT vote signatures, round-trips the
     * block through its byte representation, and checks that the recovered
     * signatures belong to the two signing keys and verify against the vote
     * encoding.
     */
    @Test
    public void testVotesSerialization() {
        Key key1 = new Key();
        Key key2 = new Key();
        List<Transaction> transactions = new ArrayList<>();
        List<TransactionResult> results = new ArrayList<>();
        long number = 1;
        byte[] coinbase = key1.toAddress();
        byte[] prevHash = Bytes.EMPTY_HASH;
        long timestamp = System.currentTimeMillis();
        byte[] transactionsRoot = MerkleUtil.computeTransactionsRoot(transactions);
        byte[] resultsRoot = MerkleUtil.computeResultsRoot(results);
        byte[] stateRoot = Bytes.EMPTY_HASH;
        byte[] data = {};
        int view = 1;
        BlockHeader header = new BlockHeader(number, coinbase, prevHash, timestamp, transactionsRoot, resultsRoot,
                stateRoot, data);
        Block block = new Block(header, transactions, results);
        List<Key.Signature> votes = new ArrayList<>();
        // both votes carry identical content (type, value, height, view, hash),
        // signed by two different keys
        Vote vote = new Vote(VoteType.PRECOMMIT, Vote.VALUE_APPROVE, block.getNumber(), view, block.getHash())
                .sign(key1);
        votes.add(vote.getSignature());
        vote = new Vote(VoteType.PRECOMMIT, Vote.VALUE_APPROVE, block.getNumber(), view, block.getHash()).sign(key2);
        votes.add(vote.getSignature());
        block.setView(view);
        block.setVotes(votes);
        // serialize and deserialize the block, including its votes
        block = Block.fromBytes(block.toBytesHeader(), block.toBytesTransactions(), block.toBytesResults(),
                block.toBytesVotes());
        for (Key.Signature sig : block.getVotes()) {
            // each recovered signer address must match one of the two keys
            ByteArray address = ByteArray.of(Hash.h160(sig.getPublicKey()));
            assertTrue(
                    address.equals(ByteArray.of(key1.toAddress())) || address.equals(ByteArray.of(key2.toAddress())));
            // NOTE(review): `vote` is the second vote here, but since both votes
            // encode identical content, verifying both signatures against its
            // encoding works — confirm this reuse is intentional
            assertTrue(Key.verify(vote.getEncoded(), sig));
        }
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.gui.panel;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Point;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import javax.swing.border.LineBorder;
import javax.swing.table.AbstractTableModel;
import javax.swing.table.TableModel;
import javax.swing.table.TableRowSorter;
import org.semux.core.Transaction;
import org.semux.gui.Action;
import org.semux.gui.SemuxGui;
import org.semux.gui.SwingUtil;
import org.semux.gui.dialog.TransactionDialog;
import org.semux.gui.model.WalletAccount;
import org.semux.gui.model.WalletModel;
import org.semux.message.GuiMessages;
import org.semux.util.ByteArray;
import org.semux.util.exception.UnreachableException;
/**
* Transactions panel displays all transaction from/to accounts of the wallet.
*/
/**
 * Transactions panel displays all transaction from/to accounts of the wallet.
 */
public class TransactionsPanel extends JPanel implements ActionListener {

    private static final long serialVersionUID = 1L;

    // table column headers, localized
    private static final String[] columnNames = { GuiMessages.get("Type"), GuiMessages.get("FromTo"),
            GuiMessages.get("Value"), GuiMessages.get("Time"), GuiMessages.get("Status") };

    private transient final SemuxGui gui;
    private transient final WalletModel model;

    private JTable table;
    private TransactionsTableModel tableModel;

    /**
     * Creates the panel, wires it to the wallet model, and performs an initial
     * refresh.
     *
     * @param gui
     *            the application GUI
     * @param frame
     *            parent frame, used as owner for transaction detail dialogs
     */
    public TransactionsPanel(SemuxGui gui, JFrame frame) {
        this.gui = gui;
        this.model = gui.getModel();
        // receive REFRESH actions when the model changes
        this.model.addListener(this);
        setLayout(new BorderLayout(0, 0));
        tableModel = new TransactionsTableModel();
        table = new JTable(tableModel);
        table.setName("transactionsTable");
        table.setBackground(Color.WHITE);
        table.setFillsViewportHeight(true);
        table.setGridColor(Color.LIGHT_GRAY);
        table.setRowHeight(25);
        table.getTableHeader().setPreferredSize(new Dimension(10000, 24));
        SwingUtil.setColumnWidths(table, 800, 0.1, 0.4, 0.15, 0.2, 0.15);
        SwingUtil.setColumnAlignments(table, false, false, true, true, true);
        // double-click on a row opens the transaction detail dialog
        table.addMouseListener(new MouseAdapter() {
            @Override
            public void mousePressed(MouseEvent me) {
                JTable sourceTable = (JTable) me.getSource();
                Point p = me.getPoint();
                int row = sourceTable.rowAtPoint(p);
                if (me.getClickCount() == 2 && row != -1) {
                    // map view row to model row: the table may be sorted
                    Transaction tx = tableModel.getRow(sourceTable.convertRowIndexToModel(row));
                    if (tx != null) {
                        TransactionDialog dialog = new TransactionDialog(frame, tx);
                        dialog.setVisible(true);
                    }
                }
            }
        });
        // customized table sorter
        TableRowSorter<TableModel> sorter = new TableRowSorter<>(table.getModel());
        sorter.setComparator(2, SwingUtil.VALUE_COMPARATOR);
        sorter.setComparator(3, SwingUtil.TIMESTAMP_COMPARATOR);
        table.setRowSorter(sorter);
        JScrollPane scrollPane = new JScrollPane(table);
        scrollPane.setBorder(new LineBorder(Color.LIGHT_GRAY));
        add(scrollPane);
        refresh();
    }

    /**
     * Table model backed by a list of transactions annotated with their
     * pending/completed status.
     */
    class TransactionsTableModel extends AbstractTableModel {

        private static final long serialVersionUID = 1L;

        private transient List<StatusTransaction> transactions;

        public TransactionsTableModel() {
            this.transactions = Collections.emptyList();
        }

        /**
         * Replaces the backing data and notifies the table.
         *
         * @param transactions
         *            new row data
         */
        public void setData(List<StatusTransaction> transactions) {
            this.transactions = transactions;
            this.fireTableDataChanged();
        }

        /**
         * Returns the transaction at the given model row, or null when the
         * index is out of range.
         *
         * @param row
         *            model row index
         * @return the transaction, or null
         */
        public Transaction getRow(int row) {
            if (row >= 0 && row < transactions.size()) {
                return transactions.get(row).getTransaction();
            }
            return null;
        }

        @Override
        public int getRowCount() {
            return transactions.size();
        }

        @Override
        public int getColumnCount() {
            return columnNames.length;
        }

        @Override
        public String getColumnName(int column) {
            return columnNames[column];
        }

        @Override
        public Object getValueAt(int row, int column) {
            StatusTransaction tx = transactions.get(row);
            // columns: 0 type, 1 from/to description, 2 value, 3 time, 4 status
            switch (column) {
            case 0:
                return tx.getTransaction().getType().name();
            case 1:
                return SwingUtil.getTransactionDescription(gui, tx.getTransaction());
            case 2:
                return SwingUtil.formatAmount(tx.getTransaction().getValue());
            case 3:
                return SwingUtil.formatTimestamp(tx.getTransaction().getTimestamp());
            case 4:
                return tx.getStatus();
            default:
                return null;
            }
        }
    }

    @Override
    public synchronized void actionPerformed(ActionEvent e) {
        Action action = Action.valueOf(e.getActionCommand());
        switch (action) {
        case REFRESH:
            refresh();
            break;
        default:
            throw new UnreachableException();
        }
    }

    /**
     * Refreshes this panel.
     */
    protected void refresh() {
        List<StatusTransaction> transactions = new ArrayList<>();
        // add pending transactions, keeping only those touching a wallet account
        transactions.addAll(gui.getKernel().getPendingManager().getPendingTransactions()
                .parallelStream()
                .filter(pendingTx -> {
                    for (WalletAccount acc : model.getAccounts()) {
                        if (Arrays.equals(acc.getAddress(), pendingTx.transaction.getFrom()) ||
                                Arrays.equals(acc.getAddress(), pendingTx.transaction.getTo())) {
                            return true;
                        }
                    }
                    return false;
                })
                .map(pendingTx -> new StatusTransaction(pendingTx.transaction, GuiMessages.get("Pending")))
                .collect(Collectors.toList()));
        // add completed transactions, de-duplicated by hash (a transaction
        // between two wallet accounts appears under both)
        Set<ByteArray> hashes = new HashSet<>();
        for (WalletAccount acc : model.getAccounts()) {
            for (Transaction tx : acc.getTransactions()) {
                ByteArray key = ByteArray.of(tx.getHash());
                if (!hashes.contains(key)) {
                    transactions.add(new StatusTransaction(tx, GuiMessages.get("Completed")));
                    hashes.add(key);
                }
            }
        }
        // newest first
        transactions.sort(
                (tx1, tx2) -> Long.compare(tx2.getTransaction().getTimestamp(), tx1.getTransaction().getTimestamp()));
        /*
         * update table model, restoring the previous row selection by hash
         */
        Transaction tx = getSelectedTransaction();
        tableModel.setData(transactions);
        if (tx != null) {
            for (int i = 0; i < transactions.size(); i++) {
                if (Arrays.equals(tx.getHash(), transactions.get(i).getTransaction().getHash())) {
                    table.setRowSelectionInterval(table.convertRowIndexToView(i), table.convertRowIndexToView(i));
                    break;
                }
            }
        }
    }

    /**
     * Returns the selected transaction.
     *
     * @return the selected transaction, or null when nothing is selected
     */
    protected Transaction getSelectedTransaction() {
        int row = table.getSelectedRow();
        return (row != -1) ? tableModel.getRow(table.convertRowIndexToModel(row)) : null;
    }

    /**
     * A transaction paired with its display status ("Pending" or "Completed").
     */
    private static class StatusTransaction {

        private String status;
        private Transaction transaction;

        public StatusTransaction(Transaction transaction, String status) {
            this.transaction = transaction;
            this.status = status;
        }

        public String getStatus() {
            return status;
        }

        public Transaction getTransaction() {
            return transaction;
        }
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.api.http;
import static io.netty.handler.codec.http.HttpResponseStatus.BAD_REQUEST;
import static io.netty.handler.codec.http.HttpResponseStatus.CONTINUE;
import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1;
import java.nio.charset.Charset;
import java.security.MessageDigest;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.tuple.Pair;
import org.semux.api.ApiHandler;
import org.semux.api.ApiHandlerResponse;
import org.semux.config.Config;
import org.semux.util.BasicAuth;
import org.semux.util.Bytes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.handler.codec.DecoderResult;
import io.netty.handler.codec.http.DefaultFullHttpResponse;
import io.netty.handler.codec.http.FullHttpResponse;
import io.netty.handler.codec.http.HttpContent;
import io.netty.handler.codec.http.HttpHeaderNames;
import io.netty.handler.codec.http.HttpHeaderValues;
import io.netty.handler.codec.http.HttpHeaders;
import io.netty.handler.codec.http.HttpObject;
import io.netty.handler.codec.http.HttpRequest;
import io.netty.handler.codec.http.HttpResponseStatus;
import io.netty.handler.codec.http.HttpUtil;
import io.netty.handler.codec.http.LastHttpContent;
import io.netty.handler.codec.http.QueryStringDecoder;
import io.netty.util.CharsetUtil;
/**
* HTTP handler for Semux API.
*
*/
/**
 * HTTP handler for Semux API.
 *
 */
public class HttpHandler extends SimpleChannelInboundHandler<Object> {

    private static final Logger logger = LoggerFactory.getLogger(HttpHandler.class);

    // maximum accepted request body size
    private static final int MAX_BODY_SIZE = 512 * 1024; // 512KB
    private static final Charset CHARSET = CharsetUtil.UTF_8;

    // shared JSON serializer for responses
    private static ObjectMapper objectMapper = new ObjectMapper();

    private Config config;
    private ApiHandler apiHandler;

    // per-request state, populated as the HTTP message parts arrive
    private boolean keepAlive;
    private String uri;
    private Map<String, List<String>> params;
    private HttpHeaders headers;
    private ByteBuf body;

    // set early by checkDecoderResult() on decode failure; otherwise produced by
    // the api handler when the last content chunk arrives
    private ApiHandlerResponse response = null;
    private HttpResponseStatus status;

    /**
     * Creates an HTTP handler.
     *
     * @param config
     *            node configuration (supplies the API credentials)
     * @param apiHandler
     *            handler that services decoded API requests
     */
    public HttpHandler(Config config, ApiHandler apiHandler) {
        this.config = config;
        this.apiHandler = apiHandler;
    }

    @Override
    public void channelReadComplete(ChannelHandlerContext ctx) {
        ctx.flush();
    }

    @Override
    protected void channelRead0(ChannelHandlerContext ctx, Object msg) {
        if (msg instanceof HttpRequest) {
            HttpRequest request = (HttpRequest) msg;
            if (HttpUtil.is100ContinueExpected(request)) {
                send100Continue(ctx);
            }
            keepAlive = HttpUtil.isKeepAlive(request);
            uri = request.uri();
            // copy collection to ensure it is writable
            params = new HashMap<>(new QueryStringDecoder(request.uri(), CHARSET).parameters());
            headers = request.headers();
            body = Unpooled.buffer(MAX_BODY_SIZE);
            checkDecoderResult(request);
        }
        if (msg instanceof HttpContent) {
            HttpContent httpContent = (HttpContent) msg;
            // accumulate body chunks until the last content arrives
            ByteBuf content = httpContent.content();
            int length = content.readableBytes();
            if (length > 0) {
                body.writeBytes(content, length);
            }
            if (msg instanceof LastHttpContent) {
                LastHttpContent trailer = (LastHttpContent) msg;
                checkDecoderResult(trailer);
                // trailing headers are ignored
                // basic authentication
                if (!checkBasicAuth(headers)) {
                    FullHttpResponse resp = new DefaultFullHttpResponse(HTTP_1_1, HttpResponseStatus.UNAUTHORIZED);
                    resp.headers().set(HttpHeaderNames.WWW_AUTHENTICATE, "Basic realm=\"Semux RESTful API\"");
                    resp.headers().set(HttpHeaderNames.CONTENT_LENGTH, resp.content().readableBytes());
                    if (keepAlive) {
                        resp.headers().set(HttpHeaderNames.CONNECTION, HttpHeaderValues.KEEP_ALIVE);
                    }
                    // NOTE(review): unlike writeResponse(), this early return
                    // never calls reset() and does not close the connection when
                    // keep-alive is off — confirm this is handled upstream
                    ctx.write(resp);
                    return;
                }
                // process uri: strip the query string, already decoded above
                if (uri.contains("?")) {
                    uri = uri.substring(0, uri.indexOf('?'));
                }
                // parse parameter from body (form-encoded POSTs), merging with
                // any query-string parameters of the same name
                if ("application/x-www-form-urlencoded".equals(headers.get("Content-type"))
                        && body.readableBytes() > 0) {
                    QueryStringDecoder decoder = new QueryStringDecoder("?" + body.toString(CHARSET));
                    Map<String, List<String>> map = decoder.parameters();
                    for (Map.Entry<String, List<String>> entry : map.entrySet()) {
                        if (params.containsKey(entry.getKey())) {
                            params.get(entry.getKey()).addAll(entry.getValue());
                        } else {
                            params.put(entry.getKey(), entry.getValue());
                        }
                    }
                }
                // filter parameters: collapse multi-valued params to their first value
                Map<String, String> map = new HashMap<>();
                for (Map.Entry<String, List<String>> entry : params.entrySet()) {
                    List<String> v = entry.getValue();
                    // duplicate names are not allowed.
                    if (!v.isEmpty()) {
                        map.put(entry.getKey(), v.get(0));
                    }
                }
                // delegate the request to api handler if a response has not been generated
                // (a response may already exist from a decoder failure)
                if (response == null) {
                    response = apiHandler.service(uri, map, headers);
                    status = HttpResponseStatus.OK;
                }
                boolean prettyPrint = Boolean.valueOf(map.get("pretty"));
                // write response
                String responseString;
                try {
                    if (prettyPrint) {
                        responseString = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(response);
                    } else {
                        responseString = objectMapper.writeValueAsString(response);
                    }
                } catch (JsonProcessingException e) {
                    responseString = "{\"success\":false,\"message\":\"Internal server error\"}";
                }
                if (!writeResponse(ctx, status, responseString)) {
                    // if keep-alive is off, close the connection after flushing
                    ctx.writeAndFlush(Unpooled.EMPTY_BUFFER).addListener(ChannelFutureListener.CLOSE);
                }
            }
        }
    }

    // on decode failure, pre-set a BAD_REQUEST response so the normal response
    // path skips the api handler
    private void checkDecoderResult(HttpObject o) {
        DecoderResult result = o.decoderResult();
        if (result.isSuccess()) {
            return;
        }
        response = new ApiHandlerResponse(false, BAD_REQUEST.toString());
        status = BAD_REQUEST;
    }

    // clears per-request response state so the next pipelined request starts fresh
    private void reset() {
        response = null;
        status = null;
    }

    // constant-time credential comparison to avoid timing side channels
    private boolean checkBasicAuth(HttpHeaders headers) {
        Pair<String, String> auth = BasicAuth.parseAuth(headers.get(HttpHeaderNames.AUTHORIZATION));
        return auth != null
                && MessageDigest.isEqual(Bytes.of(auth.getLeft()), Bytes.of(config.apiUsername()))
                && MessageDigest.isEqual(Bytes.of(auth.getRight()), Bytes.of(config.apiPassword()));
    }

    /**
     * Writes a JSON response to the channel and resets per-request state.
     *
     * @param ctx
     *            the channel context
     * @param status
     *            HTTP status to send
     * @param responseBody
     *            JSON body, may be null (sent as empty)
     * @return true when the connection should be kept alive
     */
    private boolean writeResponse(ChannelHandlerContext ctx, HttpResponseStatus status, String responseBody) {
        // construct a HTTP response
        FullHttpResponse resp = new DefaultFullHttpResponse(HTTP_1_1, status,
                Unpooled.copiedBuffer(responseBody == null ? "" : responseBody, CHARSET));
        // set response headers
        resp.headers().set(HttpHeaderNames.CONTENT_TYPE, "application/json; charset=UTF-8");
        resp.headers().set(HttpHeaderNames.CONTENT_LENGTH, resp.content().readableBytes());
        if (keepAlive) {
            resp.headers().set(HttpHeaderNames.CONNECTION, HttpHeaderValues.KEEP_ALIVE);
        }
        // write response
        ctx.write(resp);
        reset();
        return keepAlive;
    }

    private static void send100Continue(ChannelHandlerContext ctx) {
        FullHttpResponse response = new DefaultFullHttpResponse(HTTP_1_1, CONTINUE);
        ctx.write(response);
    }

    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
        logger.debug("Exception in API http handler", cause);
        ctx.close();
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.net.msg.consensus;
import org.semux.net.msg.Message;
import org.semux.net.msg.MessageCode;
import org.semux.util.Bytes;
/**
 * BFT message announcing a new blockchain height to peers.
 */
public class NewHeightMessage extends Message {

    private long height;

    /**
     * Creates a message for the given height.
     *
     * @param height
     *            the new height
     */
    public NewHeightMessage(long height) {
        super(MessageCode.BFT_NEW_HEIGHT, null);
        this.height = height;
        // wire format is simply the 8-byte encoding of the height
        this.encoded = Bytes.of(height);
    }

    /**
     * Decodes a message from its wire representation.
     *
     * @param encoded
     *            the 8-byte encoded height
     */
    public NewHeightMessage(byte[] encoded) {
        super(MessageCode.BFT_NEW_HEIGHT, null);
        this.height = Bytes.toLong(encoded);
        this.encoded = encoded;
    }

    public long getHeight() {
        return height;
    }

    @Override
    public String toString() {
        return String.format("BFTNewHeightMessage [height=%d]", height);
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.core;
import static org.semux.consensus.ValidatorActivatedFork.UNIFORM_DISTRIBUTION;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
import org.semux.config.Config;
import org.semux.config.Constants;
import org.semux.consensus.ValidatorActivatedFork;
import org.semux.core.Genesis.Premine;
import org.semux.core.exception.BlockchainException;
import org.semux.core.state.AccountState;
import org.semux.core.state.AccountStateImpl;
import org.semux.core.state.Delegate;
import org.semux.core.state.DelegateState;
import org.semux.core.state.DelegateStateImpl;
import org.semux.crypto.Hex;
import org.semux.db.Database;
import org.semux.db.DatabaseFactory;
import org.semux.db.DatabaseName;
import org.semux.db.Migration;
import org.semux.db.TempDatabaseFactory;
import org.semux.util.Bytes;
import org.semux.util.FileUtil;
import org.semux.util.SimpleDecoder;
import org.semux.util.SimpleEncoder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
/**
 * Blockchain implementation.
 *
 * <pre>
 * index DB structure:
 *
 * [0] => [latest_block_number]
 * [1] => [validators]
 * [2, address] => [validator_stats]
 *
 * [3, block_hash] => [block_number]
 * [4, transaction_hash] => [block_number, from, to] | [coinbase_transaction]
 * [5, address, n] => [transaction_hash]
 * [6] => [activated forks]
 *
 * [0xff] => [database version]
 * </pre>
 *
 * <pre>
 * block DB structure:
 *
 * [0, block_number] => [block_header]
 * [1, block_number] => [block_transactions]
 * [2, block_number] => [block_results]
 * [3, block_number] => [block_votes]
 * </pre>
 *
 */
public class BlockchainImpl implements Blockchain {

    private static final Logger logger = LoggerFactory.getLogger(BlockchainImpl.class);

    protected static final int DATABASE_VERSION = 1;

    // key prefixes of the index database (see class javadoc)
    protected static final byte TYPE_LATEST_BLOCK_NUMBER = 0x00;
    protected static final byte TYPE_VALIDATORS = 0x01;
    protected static final byte TYPE_VALIDATOR_STATS = 0x02;
    protected static final byte TYPE_BLOCK_HASH = 0x03;
    protected static final byte TYPE_TRANSACTION_HASH = 0x04;
    protected static final byte TYPE_ACCOUNT_TRANSACTION = 0x05;
    protected static final byte TYPE_ACTIVATED_FORKS = 0x06;
    protected static final byte TYPE_DATABASE_VERSION = (byte) 0xff;

    // key prefixes of the block database (see class javadoc)
    protected static final byte TYPE_BLOCK_HEADER = 0x00;
    protected static final byte TYPE_BLOCK_TRANSACTIONS = 0x01;
    protected static final byte TYPE_BLOCK_RESULTS = 0x02;
    protected static final byte TYPE_BLOCK_VOTES = 0x03;

    /**
     * The kinds of per-validator counters tracked by
     * {@link #adjustValidatorStats(byte[], StatsType, long)}.
     */
    protected enum StatsType {
        FORGED, HIT, MISSED
    }

    private final Config config;

    private Database indexDB;
    private Database blockDB;

    private AccountState accountState;
    private DelegateState delegateState;

    private Genesis genesis;
    private Block latestBlock;

    private final List<BlockchainListener> listeners = new ArrayList<>();

    /**
     * Activated forks at current height.
     */
    private Map<ValidatorActivatedFork, ValidatorActivatedFork.Activation> activatedForks = new ConcurrentHashMap<>();

    /**
     * Cache of <code>(fork, height) -> activated blocks</code>. As there's only one
     * fork in this version, 2 slots are reserved for current height and current
     * height - 1.
     */
    private Cache<ImmutablePair<ValidatorActivatedFork, Long>, ForkActivationMemory> forkActivationMemoryCache = Caffeine
            .newBuilder()
            .maximumSize(2)
            .build();

    /**
     * Create a blockchain instance.
     *
     * @param config
     * @param dbFactory
     */
    public BlockchainImpl(Config config, DatabaseFactory dbFactory) {
        this.config = config;
        openDb(dbFactory);
    }

    /**
     * Opens the underlying databases, initializing or upgrading them on first use.
     *
     * @param factory
     *            database factory to obtain the individual databases from
     */
    private synchronized void openDb(DatabaseFactory factory) {
        this.indexDB = factory.getDB(DatabaseName.INDEX);
        this.blockDB = factory.getDB(DatabaseName.BLOCK);
        this.accountState = new AccountStateImpl(factory.getDB(DatabaseName.ACCOUNT));
        this.delegateState = new DelegateStateImpl(this, factory.getDB(DatabaseName.DELEGATE),
                factory.getDB(DatabaseName.VOTE));
        this.genesis = Genesis.load(config.network());

        // checks if the database needs to be initialized
        byte[] number = indexDB.get(Bytes.of(TYPE_LATEST_BLOCK_NUMBER));
        if (number == null || number.length == 0) {
            initializeDb();
            return;
        }

        // load version 0 index
        latestBlock = getBlock(Bytes.toLong(number));

        // checks if the database needs to be upgraded
        if (getDatabaseVersion() == 0) {
            upgradeDb0(factory);
            return;
        }

        // load version 1 index
        activatedForks = getActivatedForks();
    }

    /**
     * Initializes a fresh database: version marker, fork records, premines,
     * genesis delegates and the genesis block itself.
     */
    private void initializeDb() {
        // initialize database version
        indexDB.put(getDatabaseVersionKey(), Bytes.of(DATABASE_VERSION));

        // initialize activated forks
        setActivatedForks(new HashMap<>());

        // pre-allocation
        for (Premine p : genesis.getPremines().values()) {
            accountState.adjustAvailable(p.getAddress(), p.getAmount());
        }
        accountState.commit();

        // delegates
        for (Entry<String, byte[]> e : genesis.getDelegates().entrySet()) {
            delegateState.register(e.getValue(), Bytes.of(e.getKey()), 0);
        }
        delegateState.commit();

        // add block
        addBlock(genesis);
    }

    /**
     * Upgrade this database from version 0 to version 1.
     *
     * @param dbFactory
     */
    private void upgradeDb0(DatabaseFactory dbFactory) {
        // run the migration
        new MigrationBlockDbVersion001().migrate(config, dbFactory);

        // reload this blockchain database
        openDb(dbFactory);
    }

    @Override
    public AccountState getAccountState() {
        return accountState;
    }

    @Override
    public DelegateState getDelegateState() {
        return delegateState;
    }

    @Override
    public Block getLatestBlock() {
        return latestBlock;
    }

    @Override
    public long getLatestBlockNumber() {
        return latestBlock.getNumber();
    }

    @Override
    public byte[] getLatestBlockHash() {
        return latestBlock.getHash();
    }

    @Override
    public long getBlockNumber(byte[] hash) {
        byte[] number = indexDB.get(Bytes.merge(TYPE_BLOCK_HASH, hash));
        return (number == null) ? -1 : Bytes.toLong(number);
    }

    @Override
    public Block getBlock(long number) {
        byte[] header = blockDB.get(Bytes.merge(TYPE_BLOCK_HEADER, Bytes.of(number)));
        byte[] transactions = blockDB.get(Bytes.merge(TYPE_BLOCK_TRANSACTIONS, Bytes.of(number)));
        byte[] results = blockDB.get(Bytes.merge(TYPE_BLOCK_RESULTS, Bytes.of(number)));
        byte[] votes = blockDB.get(Bytes.merge(TYPE_BLOCK_VOTES, Bytes.of(number)));

        return (header == null) ? null : Block.fromBytes(header, transactions, results, votes);
    }

    @Override
    public Block getBlock(byte[] hash) {
        long number = getBlockNumber(hash);
        return (number == -1) ? null : getBlock(number);
    }

    @Override
    public BlockHeader getBlockHeader(long number) {
        byte[] header = blockDB.get(Bytes.merge(TYPE_BLOCK_HEADER, Bytes.of(number)));
        return (header == null) ? null : BlockHeader.fromBytes(header);
    }

    @Override
    public BlockHeader getBlockHeader(byte[] hash) {
        long number = getBlockNumber(hash);
        return (number == -1) ? null : getBlockHeader(number);
    }

    @Override
    public boolean hasBlock(long number) {
        return blockDB.get(Bytes.merge(TYPE_BLOCK_HEADER, Bytes.of(number))) != null;
    }

    @Override
    public Transaction getTransaction(byte[] hash) {
        byte[] bytes = indexDB.get(Bytes.merge(TYPE_TRANSACTION_HASH, hash));
        if (bytes != null) {
            // coinbase transaction
            if (bytes.length > 64) {
                return Transaction.fromBytes(bytes);
            }

            // regular transaction: the index stores [block number, offset, length]
            // pointing into the block's serialized transaction list
            SimpleDecoder dec = new SimpleDecoder(bytes);
            long number = dec.readLong();
            int start = dec.readInt();
            dec.readInt();

            byte[] transactions = blockDB.get(Bytes.merge(TYPE_BLOCK_TRANSACTIONS, Bytes.of(number)));
            dec = new SimpleDecoder(transactions, start);
            return Transaction.fromBytes(dec.readBytes());
        }

        return null;
    }

    @Override
    public boolean hasTransaction(final byte[] hash) {
        return indexDB.get(Bytes.merge(TYPE_TRANSACTION_HASH, hash)) != null;
    }

    @Override
    public TransactionResult getTransactionResult(byte[] hash) {
        byte[] bytes = indexDB.get(Bytes.merge(TYPE_TRANSACTION_HASH, hash));
        if (bytes != null) {
            // coinbase transaction
            if (bytes.length > 64) {
                return new TransactionResult(true);
            }

            // regular transaction: follow the [block number, offset] pointer into
            // the block's serialized results
            SimpleDecoder dec = new SimpleDecoder(bytes);
            long number = dec.readLong();
            dec.readInt();
            int start = dec.readInt();

            byte[] results = blockDB.get(Bytes.merge(TYPE_BLOCK_RESULTS, Bytes.of(number)));
            dec = new SimpleDecoder(results, start);
            return TransactionResult.fromBytes(dec.readBytes());
        }

        return null;
    }

    @Override
    public long getTransactionBlockNumber(byte[] hash) {
        Transaction tx = getTransaction(hash);
        // unknown transaction hash; keep the -1 "not found" convention instead of
        // throwing a NullPointerException
        if (tx == null) {
            return -1;
        }
        if (tx.getType() == TransactionType.COINBASE) {
            // coinbase transactions use the block number as their nonce
            return tx.getNonce();
        }

        byte[] bytes = indexDB.get(Bytes.merge(TYPE_TRANSACTION_HASH, hash));
        if (bytes != null) {
            SimpleDecoder dec = new SimpleDecoder(bytes);
            return dec.readLong();
        }

        return -1;
    }

    @Override
    public synchronized void addBlock(Block block) {
        long number = block.getNumber();
        byte[] hash = block.getHash();

        activateForks(number);

        if (number != genesis.getNumber() && number != latestBlock.getNumber() + 1) {
            logger.error("Adding wrong block: number = {}, expected = {}", number, latestBlock.getNumber() + 1);
            throw new BlockchainException("Blocks can only be added sequentially");
        }

        // [1] update block
        blockDB.put(Bytes.merge(TYPE_BLOCK_HEADER, Bytes.of(number)), block.toBytesHeader());
        blockDB.put(Bytes.merge(TYPE_BLOCK_TRANSACTIONS, Bytes.of(number)), block.toBytesTransactions());
        blockDB.put(Bytes.merge(TYPE_BLOCK_RESULTS, Bytes.of(number)), block.toBytesResults());
        blockDB.put(Bytes.merge(TYPE_BLOCK_VOTES, Bytes.of(number)), block.toBytesVotes());

        indexDB.put(Bytes.merge(TYPE_BLOCK_HASH, hash), Bytes.of(number));

        // [2] update transaction indices
        List<Transaction> txs = block.getTransactions();
        List<Pair<Integer, Integer>> txIndices = block.getTransactionIndices();
        Amount reward = config.getBlockReward(number);
        for (int i = 0; i < txs.size(); i++) {
            Transaction tx = txs.get(i);
            reward = Amount.sum(reward, tx.getFee());

            SimpleEncoder enc = new SimpleEncoder();
            enc.writeLong(number);
            enc.writeInt(txIndices.get(i).getLeft());
            enc.writeInt(txIndices.get(i).getRight());

            indexDB.put(Bytes.merge(TYPE_TRANSACTION_HASH, tx.getHash()), enc.toBytes());

            // [3] update transaction_by_account index
            addTransactionToAccount(tx, tx.getFrom());
            if (!Arrays.equals(tx.getFrom(), tx.getTo())) {
                addTransactionToAccount(tx, tx.getTo());
            }
        }

        if (number != genesis.getNumber()) {
            // [4] coinbase transaction
            Transaction tx = new Transaction(config.network(),
                    TransactionType.COINBASE,
                    block.getCoinbase(),
                    reward,
                    Amount.ZERO,
                    block.getNumber(),
                    block.getTimestamp(),
                    Bytes.EMPTY_BYTES);
            tx.sign(Constants.COINBASE_KEY);
            indexDB.put(Bytes.merge(TYPE_TRANSACTION_HASH, tx.getHash()), tx.toBytes());
            addTransactionToAccount(tx, block.getCoinbase());

            // [5] update validator statistics
            List<String> validators = getValidators();
            String primary = config.getPrimaryValidator(validators, number, 0,
                    activatedForks.containsKey(UNIFORM_DISTRIBUTION));
            adjustValidatorStats(block.getCoinbase(), StatsType.FORGED, 1);
            if (primary.equals(Hex.encode(block.getCoinbase()))) {
                adjustValidatorStats(Hex.decode0x(primary), StatsType.HIT, 1);
            } else {
                adjustValidatorStats(Hex.decode0x(primary), StatsType.MISSED, 1);
            }
        }

        // [6] update validator set
        if (number % config.getValidatorUpdateInterval() == 0) {
            updateValidators(block.getNumber());
        }

        // [7] update latest_block
        latestBlock = block;
        indexDB.put(Bytes.of(TYPE_LATEST_BLOCK_NUMBER), Bytes.of(number));

        for (BlockchainListener listener : listeners) {
            listener.onBlockAdded(block);
        }
    }

    /**
     * Attempt to activate pending forks at current height.
     */
    private synchronized void activateForks(long number) {
        if (config.forkUniformDistributionEnabled()
                && !activatedForks.containsKey(UNIFORM_DISTRIBUTION)
                && number <= UNIFORM_DISTRIBUTION.activationDeadline
                && forkActivated(number, ValidatorActivatedFork.UNIFORM_DISTRIBUTION)) {
            // persist the activated fork
            activatedForks.put(UNIFORM_DISTRIBUTION,
                    new ValidatorActivatedFork.Activation(UNIFORM_DISTRIBUTION, number));
            setActivatedForks(activatedForks);
            logger.info("Fork UNIFORM_DISTRIBUTION activated at block {}", number);
        }
    }

    @Override
    public Genesis getGenesis() {
        return genesis;
    }

    @Override
    public void addListener(BlockchainListener listener) {
        listeners.add(listener);
    }

    @Override
    public int getTransactionCount(byte[] address) {
        byte[] cnt = indexDB.get(Bytes.merge(TYPE_ACCOUNT_TRANSACTION, address));
        return (cnt == null) ? 0 : Bytes.toInt(cnt);
    }

    @Override
    public List<Transaction> getTransactions(byte[] address, int from, int to) {
        List<Transaction> list = new ArrayList<>();

        int total = getTransactionCount(address);
        for (int i = from; i < total && i < to; i++) {
            byte[] key = getNthTransactionIndexKey(address, i);
            byte[] value = indexDB.get(key);
            list.add(getTransaction(value));
        }

        return list;
    }

    @Override
    public List<String> getValidators() {
        List<String> validators = new ArrayList<>();

        byte[] v = indexDB.get(Bytes.of(TYPE_VALIDATORS));
        if (v != null) {
            SimpleDecoder dec = new SimpleDecoder(v);
            int n = dec.readInt();
            for (int i = 0; i < n; i++) {
                validators.add(dec.readString());
            }
        }

        return validators;
    }

    @Override
    public ValidatorStats getValidatorStats(byte[] address) {
        byte[] key = Bytes.merge(TYPE_VALIDATOR_STATS, address);
        byte[] value = indexDB.get(key);

        return (value == null) ? new ValidatorStats(0, 0, 0) : ValidatorStats.fromBytes(value);
    }

    /**
     * Updates the validator set.
     *
     * @param number
     */
    protected void updateValidators(long number) {
        List<String> validators = new ArrayList<>();

        List<Delegate> delegates = delegateState.getDelegates();
        int max = Math.min(delegates.size(), config.getNumberOfValidators(number));
        for (int i = 0; i < max; i++) {
            Delegate d = delegates.get(i);
            validators.add(Hex.encode(d.getAddress()));
        }

        SimpleEncoder enc = new SimpleEncoder();
        enc.writeInt(validators.size());
        for (String v : validators) {
            enc.writeString(v);
        }
        indexDB.put(Bytes.of(TYPE_VALIDATORS), enc.toBytes());
    }

    /**
     * Adjusts validator statistics.
     *
     * @param address
     *            validator address
     * @param type
     *            stats type
     * @param delta
     *            difference
     */
    protected void adjustValidatorStats(byte[] address, StatsType type, long delta) {
        byte[] key = Bytes.merge(TYPE_VALIDATOR_STATS, address);
        byte[] value = indexDB.get(key);

        ValidatorStats stats = (value == null) ? new ValidatorStats(0, 0, 0) : ValidatorStats.fromBytes(value);

        switch (type) {
        case FORGED:
            stats.setBlocksForged(stats.getBlocksForged() + delta);
            break;
        case HIT:
            stats.setTurnsHit(stats.getTurnsHit() + delta);
            break;
        case MISSED:
            stats.setTurnsMissed(stats.getTurnsMissed() + delta);
            break;
        default:
            break;
        }

        indexDB.put(key, stats.toBytes());
    }

    /**
     * Sets the total number of transaction of an account.
     *
     * @param address
     * @param total
     */
    protected void setTransactionCount(byte[] address, int total) {
        indexDB.put(Bytes.merge(TYPE_ACCOUNT_TRANSACTION, address), Bytes.of(total));
    }

    /**
     * Adds a transaction to an account.
     *
     * @param tx
     * @param address
     */
    protected void addTransactionToAccount(Transaction tx, byte[] address) {
        int total = getTransactionCount(address);
        indexDB.put(getNthTransactionIndexKey(address, total), tx.getHash());
        setTransactionCount(address, total + 1);
    }

    /**
     * Returns the N-th transaction index key of an account.
     *
     * @param address
     * @param n
     * @return
     */
    protected byte[] getNthTransactionIndexKey(byte[] address, int n) {
        return Bytes.merge(Bytes.of(TYPE_ACCOUNT_TRANSACTION), address, Bytes.of(n));
    }

    @Override
    public Map<ValidatorActivatedFork, ValidatorActivatedFork.Activation> getActivatedForks() {
        Map<ValidatorActivatedFork, ValidatorActivatedFork.Activation> activations = new HashMap<>();
        byte[] bytes = indexDB.get(getActivatedForksKey());
        // no fork record yet; avoid handing a null buffer to the decoder
        if (bytes == null) {
            return activations;
        }
        SimpleDecoder simpleDecoder = new SimpleDecoder(bytes);
        final int numberOfForks = simpleDecoder.readInt();
        for (int i = 0; i < numberOfForks; i++) {
            ValidatorActivatedFork.Activation activation = ValidatorActivatedFork.Activation
                    .fromBytes(simpleDecoder.readBytes());
            activations.put(activation.fork, activation);
        }
        return activations;
    }

    /**
     * Persists the set of activated forks into the index database.
     *
     * @param activatedForks
     */
    private void setActivatedForks(Map<ValidatorActivatedFork, ValidatorActivatedFork.Activation> activatedForks) {
        SimpleEncoder simpleEncoder = new SimpleEncoder();
        simpleEncoder.writeInt(activatedForks.size());
        for (Map.Entry<ValidatorActivatedFork, ValidatorActivatedFork.Activation> entry : activatedForks.entrySet()) {
            simpleEncoder.writeBytes(entry.getValue().toBytes());
        }
        indexDB.put(getActivatedForksKey(), simpleEncoder.toBytes());
    }

    private byte[] getActivatedForksKey() {
        return Bytes.of(TYPE_ACTIVATED_FORKS);
    }

    /**
     * Returns the version of current database.
     *
     * @return
     */
    protected int getDatabaseVersion() {
        byte[] versionBytes = indexDB.get(getDatabaseVersionKey());
        if (versionBytes == null || versionBytes.length == 0) {
            return 0;
        } else {
            return Bytes.toInt(versionBytes);
        }
    }

    /**
     * Returns the database key for #{@link #getDatabaseVersion}.
     *
     * @return
     */
    private byte[] getDatabaseVersionKey() {
        return Bytes.of(TYPE_DATABASE_VERSION);
    }

    /**
     * Validator statistics.
     *
     */
    public static class ValidatorStats {
        private long blocksForged;
        private long turnsHit;
        private long turnsMissed;

        public ValidatorStats(long forged, long hit, long missed) {
            this.blocksForged = forged;
            this.turnsHit = hit;
            this.turnsMissed = missed;
        }

        public long getBlocksForged() {
            return blocksForged;
        }

        void setBlocksForged(long forged) {
            this.blocksForged = forged;
        }

        public long getTurnsHit() {
            return turnsHit;
        }

        void setTurnsHit(long hit) {
            this.turnsHit = hit;
        }

        public long getTurnsMissed() {
            return turnsMissed;
        }

        void setTurnsMissed(long missed) {
            this.turnsMissed = missed;
        }

        public byte[] toBytes() {
            SimpleEncoder enc = new SimpleEncoder();
            enc.writeLong(blocksForged);
            enc.writeLong(turnsHit);
            enc.writeLong(turnsMissed);
            return enc.toBytes();
        }

        public static ValidatorStats fromBytes(byte[] bytes) {
            SimpleDecoder dec = new SimpleDecoder(bytes);
            long forged = dec.readLong();
            long hit = dec.readLong();
            long missed = dec.readLong();
            return new ValidatorStats(forged, hit, missed);
        }
    }

    /**
     * Checks if a fork is activated at a certain height of this blockchain.
     *
     * @param height
     *            A blockchain height to check.
     * @param fork
     *            An instance of ${@link ValidatorActivatedFork} to check.
     * @return
     */
    @Override
    public synchronized boolean forkActivated(final long height, ValidatorActivatedFork fork) {
        // skips genesis block
        if (height <= 1) {
            return false;
        }

        // checks whether the fork has been activated and recorded in database
        if (activatedForks.containsKey(fork)) {
            return height >= activatedForks.get(fork).activatedAt;
        }

        // returns memoized result of fork activation lookup at current height
        ForkActivationMemory currentHeightActivationMemory = forkActivationMemoryCache
                .getIfPresent(ImmutablePair.of(fork, height));
        if (currentHeightActivationMemory != null) {
            return currentHeightActivationMemory.activatedBlocks >= fork.activationBlocks;
        }

        // sets boundaries:
        // lookup from (height - 1)
        // to (height - fork.activationBlocksLookup)
        final long higherBound = height - 1;
        final long lowerBound = Math.min(Math.max(height - fork.activationBlocksLookup, 1), higherBound);
        long activatedBlocks = 0;

        // O(1) dynamic-programming lookup, see the definition of ForkActivationMemory
        ForkActivationMemory forkActivationMemory = forkActivationMemoryCache
                .getIfPresent(ImmutablePair.of(fork, height - 1));
        if (forkActivationMemory != null) {
            activatedBlocks = forkActivationMemory.activatedBlocks -
                    (forkActivationMemory.lowerBoundActivated && lowerBound > 1 ? 1 : 0) +
                    (getBlockHeader(higherBound).getDecodedData().signalingFork(fork) ? 1 : 0);
        } else { // O(m) traversal lookup
            for (long i = higherBound; i >= lowerBound; i--) {
                activatedBlocks += getBlockHeader(i).getDecodedData().signalingFork(fork) ? 1 : 0;
            }
        }

        // memorizes
        forkActivationMemoryCache.put(
                ImmutablePair.of(fork, height),
                new ForkActivationMemory(
                        getBlockHeader(lowerBound).getDecodedData().signalingFork(fork),
                        activatedBlocks));

        // returns
        boolean activated = activatedBlocks >= fork.activationBlocks;
        if (activatedBlocks > 0) {
            logger.debug("Fork activation of {} at height {}: {} / {} (activated = {}) in the past {} blocks",
                    fork.name,
                    height,
                    activatedBlocks,
                    fork.activationBlocks, activated, fork.activationBlocksLookup);
        }

        return activated;
    }

    /**
     * <code>
     * ForkActivationMemory[height].lowerBoundActivated =
     * forkActivated(height - ${@link ValidatorActivatedFork#activationBlocksLookup})
     *
     * ForkActivationMemory[height].activatedBlocks =
     * ForkActivationMemory[height - 1].activatedBlocks -
     * ForkActivationMemory[height - 1].lowerBoundActivated ? 1 : 0 +
     * forkActivated(height - 1) ? 1 : 0
     * </code>
     */
    private static class ForkActivationMemory {

        /**
         * Whether the fork is activated at height
         * <code>(current height -{@link ValidatorActivatedFork#activationBlocksLookup})</code>.
         */
        public final boolean lowerBoundActivated;

        /**
         * The number of activated blocks at the memorized height.
         */
        public final long activatedBlocks;

        public ForkActivationMemory(boolean lowerBoundActivated, long activatedBlocks) {
            this.lowerBoundActivated = lowerBoundActivated;
            this.activatedBlocks = activatedBlocks;
        }
    }

    /**
     * A temporary blockchain for database migration. This class implements a
     * lightweight version of
     * ${@link org.semux.consensus.SemuxBft#applyBlock(Block)} to migrate blocks
     * from an existing database to the latest schema.
     */
    private class MigrationBlockchain extends BlockchainImpl {
        private MigrationBlockchain(Config config, DatabaseFactory dbFactory) {
            super(config, dbFactory);
        }

        public void applyBlock(Block block) {
            // [0] execute transactions against local state
            TransactionExecutor transactionExecutor = new TransactionExecutor(config);
            transactionExecutor.execute(block.getTransactions(), getAccountState(), getDelegateState());

            // [1] apply block reward and tx fees
            Amount reward = config.getBlockReward(block.getNumber());
            for (Transaction tx : block.getTransactions()) {
                reward = Amount.sum(reward, tx.getFee());
            }
            if (reward.gt0()) {
                getAccountState().adjustAvailable(block.getCoinbase(), reward);
            }

            // [2] commit the updates
            getAccountState().commit();
            getDelegateState().commit();

            // [3] add block to chain
            addBlock(block);
        }
    }

    /**
     * Database migration from version 0 to version 1. The migration process creates
     * a temporary ${@link MigrationBlockchain} then migrates all blocks from an
     * existing blockchain database to the created temporary blockchain database.
     * Once all blocks have been successfully migrated, the existing blockchain
     * database is replaced by the migrated temporary blockchain database.
     */
    private class MigrationBlockDbVersion001 implements Migration {
        @Override
        public void migrate(Config config, DatabaseFactory dbFactory) {
            try {
                logger.info("Upgrading the database... DO NOT CLOSE THE WALLET!");

                // recreate block db in a temporary folder
                TempDatabaseFactory tempDb = new TempDatabaseFactory();
                MigrationBlockchain migrationBlockchain = new MigrationBlockchain(config, tempDb);
                final long latestBlockNumber = getLatestBlockNumber();
                for (long i = 1; i <= latestBlockNumber; i++) {
                    migrationBlockchain.applyBlock(getBlock(i));
                    if (i % 1000 == 0) {
                        logger.info("Loaded {} / {} blocks", i, latestBlockNumber);
                    }
                }
                dbFactory.close();
                tempDb.close();

                // replace the database folder with the recreated database
                FileUtil.recursiveDelete(dbFactory.getDataDir().toFile());
                tempDb.move(dbFactory.getDataDir());
                dbFactory.open();
                logger.info("Database upgraded to version 1.");
            } catch (IOException e) {
                logger.error("Failed to run migration " + MigrationBlockDbVersion001.class, e);
            }
        }
    }
}
<file_sep>### How to connect Semux Testnet
You can connect to the testnet via the following command:
```
./semux-gui.sh --network testnet
```
### Resources
* Testnet faucet: https://www.semux.org/testnetfaucet
* Testnet explorer: https://testnet.semux.info/<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.net.msg;
/**
 * Reason codes attached to a disconnect message, explaining why a peer
 * connection is being closed. Each code occupies a single byte on the wire.
 */
public enum ReasonCode {
    /**
     * [0x00] Reserved code.
     */
    RESERVED(0x00),
    /**
     * [0x01] Incompatible protocol.
     */
    INCOMPATIBLE_PROTOCOL(0x01),
    /**
     * [0x02] Too many active peers.
     */
    TOO_MANY_PEERS(0x02),
    /**
     * [0x03] Invalid handshake message.
     */
    INVALID_HANDSHAKE(0x03),
    /**
     * [0x04] Duplicated peerId.
     */
    DUPLICATED_PEER_ID(0x04),
    /**
     * [0x05] The message queue is full.
     */
    MESSAGE_QUEUE_FULL(0x05),
    /**
     * [0x06] Another validator peer tries to connect using the same IP.
     */
    VALIDATOR_IP_LIMITED(0x06),
    /**
     * [0x07] The peer tries to re-handshake.
     */
    HANDSHAKE_EXISTS(0x07),
    /**
     * [0x08] The peer manifests malicious behavior.
     */
    BAD_PEER(0x08);

    // numeric wire value of this reason code
    private int code;

    // reverse lookup table: byte value -> enum constant (null for unassigned slots)
    private static final ReasonCode[] intToCode = new ReasonCode[256];
    static {
        for (ReasonCode mc : ReasonCode.values()) {
            intToCode[mc.code] = mc;
        }
    }

    /**
     * Resolves a reason code from its byte value; only the low 8 bits are used.
     *
     * @param code
     *            the numeric code read from the wire
     * @return the matching constant, or {@code null} for an unassigned value
     */
    public static ReasonCode of(int code) {
        return intToCode[0xff & code];
    }

    ReasonCode(int code) {
        this.code = code;
    }

    /**
     * Returns the numeric value of this reason code.
     */
    public int getCode() {
        return code;
    }

    /**
     * Returns the single-byte wire representation of this reason code.
     */
    public byte toByte() {
        return (byte) code;
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.api.http;
import static org.awaitility.Awaitility.await;
import static org.junit.Assert.assertEquals;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Map;
import java.util.Scanner;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.semux.KernelMock;
import org.semux.api.ApiHandlerResponse;
import org.semux.rules.KernelRule;
import org.semux.util.BasicAuth;
import io.netty.handler.codec.http.HttpHeaders;
/**
 * Tests for {@link HttpHandler}: starts a real API server on a local port and
 * exercises it with {@link HttpURLConnection} requests, capturing the parsed
 * URI, parameters and headers through a stub handler.
 */
public class HttpHandlerTest {
    @Rule
    public KernelRule kernelRule = new KernelRule(51610, 51710);

    // captured by the stub handler on each request
    private String uri = null;
    private Map<String, String> params = null;
    private HttpHeaders headers = null;

    private KernelMock kernel;
    private SemuxApiService server;
    private String ip;
    private int port;
    private String auth; // pre-computed Basic-Auth header value

    @Before
    public void setUp() {
        kernel = kernelRule.getKernel();
        server = new SemuxApiService(kernel);
        ip = kernel.getConfig().apiListenIp();
        port = kernel.getConfig().apiListenPort();
        auth = BasicAuth.generateAuth(kernel.getConfig().apiUsername(), kernel.getConfig().apiPassword());
    }

    @After
    public void tearDown() {
        server.stop();
    }

    /**
     * Starts the API server on a background thread; when no initializer is given,
     * installs a stub handler that records uri/params/headers for assertions.
     */
    private void startServer(HttpChannelInitializer httpChannelInitializer) {
        // wait for server to boot up
        new Thread(() -> server.start(ip, port, httpChannelInitializer == null ? new HttpChannelInitializer() {
            @Override
            HttpHandler initHandler() {
                return new HttpHandler(kernel.getConfig(), (u, p, h) -> {
                    uri = u;
                    params = p;
                    headers = h;
                    return new ApiHandlerResponse(true, "test");
                });
            }
        } : httpChannelInitializer)).start();
        await().until(() -> server.isRunning());
    }

    // a request without the Authorization header must be rejected, which surfaces
    // as an IOException when reading the response stream
    @Test(expected = IOException.class)
    public void testAuth() throws IOException {
        startServer(null);

        URL url = new URL("http://" + ip + ":" + port + "/getinfo");
        HttpURLConnection con = (HttpURLConnection) url.openConnection();
        con.setRequestProperty("c", "d");
        con.setRequestMethod("POST");
        con.setDoOutput(true);
        con.getOutputStream().write("e=f".getBytes());

        Scanner s = new Scanner(con.getInputStream());
        s.nextLine();
        s.close();
    }

    // POST: query-string and form-body parameters must both be parsed
    @Test
    public void testPOST() throws IOException {
        startServer(null);

        URL url = new URL("http://" + ip + ":" + port + "/test?a=b");
        HttpURLConnection con = (HttpURLConnection) url.openConnection();
        con.setRequestProperty("c", "d");
        con.setRequestProperty("Authorization", auth);
        con.setRequestMethod("POST");
        con.setDoOutput(true);
        con.getOutputStream().write("e=f".getBytes());

        Scanner s = new Scanner(con.getInputStream());
        s.nextLine();
        s.close();

        assertEquals("/test", uri);
        assertEquals("b", params.get("a"));
        assertEquals("f", params.get("e"));
        assertEquals("d", headers.get("c"));
    }

    // GET: all parameters arrive via the query string
    @Test
    public void testGET() throws IOException {
        startServer(null);

        URL url = new URL("http://" + ip + ":" + port + "/test?a=b&e=f");
        HttpURLConnection con = (HttpURLConnection) url.openConnection();
        con.setRequestProperty("c", "d");
        con.setRequestProperty("Authorization", auth);

        Scanner s = new Scanner(con.getInputStream());
        s.nextLine();
        s.close();

        assertEquals("/test", uri);
        assertEquals("b", params.get("a"));
        assertEquals("f", params.get("e"));
        assertEquals("d", headers.get("c"));
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.api.response;
import org.semux.api.ApiHandlerResponse;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * API response for transaction-submitting endpoints: carries the hash of the
 * submitted transaction in the {@code result} field, alongside the inherited
 * {@code success}/{@code message} fields.
 */
public class DoTransactionResponse extends ApiHandlerResponse {

    // hex-encoded hash of the submitted transaction, serialized as "result"
    @JsonProperty("result")
    public final String txHash;

    public DoTransactionResponse(
            @JsonProperty("success") Boolean success,
            @JsonProperty("message") String message,
            @JsonProperty("result") String txHash) {
        super(success, message);
        this.txHash = txHash;
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.api.http;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicInteger;
import org.semux.Kernel;
import org.semux.api.ApiHandlerImpl;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.Channel;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import io.netty.handler.logging.LogLevel;
import io.netty.handler.logging.LoggingHandler;
/**
* Semux API launcher
*
*/
public class SemuxApiService {

    private static final Logger logger = LoggerFactory.getLogger(SemuxApiService.class);

    // names API worker threads "api-0", "api-1", ... for easier debugging
    private static final ThreadFactory factory = new ThreadFactory() {
        AtomicInteger cnt = new AtomicInteger(0);

        @Override
        public Thread newThread(Runnable r) {
            return new Thread(r, "api-" + cnt.getAndIncrement());
        }
    };

    private Kernel kernel;
    // server channel; non-null iff the server is considered running
    private Channel channel;

    private EventLoopGroup bossGroup;
    private EventLoopGroup workerGroup;

    public SemuxApiService(Kernel kernel) {
        this.kernel = kernel;
    }

    /**
     * Starts API server with configured binding address.
     */
    public void start() {
        start(kernel.getConfig().apiListenIp(), kernel.getConfig().apiListenPort());
    }

    /**
     * Starts API server at the given binding IP and port.
     *
     * @param ip
     * @param port
     */
    public void start(String ip, int port) {
        start(ip, port, new SemuxAPIHttpChannelInitializer());
    }

    /**
     * Starts API server at the given binding IP and port, with the specified
     * channel initializer.
     *
     * @param ip
     * @param port
     * @param httpChannelInitializer
     */
    public void start(String ip, int port, HttpChannelInitializer httpChannelInitializer) {
        try {
            bossGroup = new NioEventLoopGroup(1, factory);
            workerGroup = new NioEventLoopGroup(0, factory);

            ServerBootstrap b = new ServerBootstrap();
            b.group(bossGroup, workerGroup).channel(NioServerSocketChannel.class)
                    .handler(new LoggingHandler(LogLevel.INFO)).childHandler(httpChannelInitializer);

            logger.info("Starting API server: address = {}:{}", ip, port);
            // blocks until the bind completes; channel stays null on failure
            channel = b.bind(ip, port).sync().channel();
        } catch (Exception e) {
            logger.error("Failed to start API server", e);
        }
    }

    /**
     * Stops the API server if started.
     */
    public void stop() {
        if (isRunning() && channel.isOpen()) {
            try {
                channel.close().sync();
                // shutdown is initiated but deliberately not awaited, see below
                workerGroup.shutdownGracefully();
                bossGroup.shutdownGracefully();
                // workerGroup.terminationFuture().sync();
                // bossGroup.terminationFuture().sync();
                channel = null;
            } catch (Exception e) {
                logger.error("Failed to close channel", e);
            }
            logger.info("API server shut down");
        }
    }

    /**
     * Returns whether the API server is running or not.
     *
     * @return
     */
    public boolean isRunning() {
        return channel != null;
    }

    /**
     * The default channel initializer using {@link ApiHandlerImpl}.
     */
    private class SemuxAPIHttpChannelInitializer extends HttpChannelInitializer {
        @Override
        public HttpHandler initHandler() {
            return new HttpHandler(kernel.getConfig(), new ApiHandlerImpl(kernel));
        }
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.net;
import static org.semux.net.Capability.MAX_NUMBER_OF_CAPABILITIES;
import org.semux.util.SimpleDecoder;
import org.semux.util.SimpleEncoder;
/**
* Represents a Peer in the Semux network.
*/
public class Peer {
/*
* Below are the listening IP address and port number, not necessarily the real
* address that we're connecting to.
*/
private String ip;
private int port;
private short networkVersion;
private String clientId;
private String peerId;
private long latestBlockNumber;
/*
* Variables below are not persisted
*/
private long latency;
/**
* Set of capabilities the peer supports
*/
private CapabilitySet capabilities;
/**
 * Create a new Peer instance.
 *
 * @param ip
 *            the listening IP address the peer advertises
 * @param port
 *            the listening port number the peer advertises
 * @param networkVersion
 *            the peer's network protocol version
 * @param clientId
 *            the peer's client software identifier
 * @param peerId
 *            the peer's unique identifier (40-character hex string)
 * @param latestBlockNumber
 *            the peer's latest known block number
 * @param capabilities
 *            the set of capabilities the peer supports
 */
public Peer(String ip, int port, short networkVersion, String clientId, String peerId, long latestBlockNumber,
        CapabilitySet capabilities) {
    super();
    this.ip = ip;
    this.port = port;
    this.peerId = peerId;
    this.latestBlockNumber = latestBlockNumber;
    this.networkVersion = networkVersion;
    this.clientId = clientId;
    this.capabilities = capabilities;
}
/**
 * Validates the handshake-provided fields of this peer: non-null strings
 * within length limits, non-negative numeric fields, a 40-character peerId,
 * and a bounded capability set.
 *
 * @return true if every field is within acceptable bounds
 */
public boolean validate() {
    return ip != null && ip.length() <= 128
            && port >= 0
            && networkVersion >= 0
            && clientId != null && clientId.length() < 128
            && peerId != null && peerId.length() == 40
            && latestBlockNumber >= 0
            && capabilities != null && capabilities.size() <= MAX_NUMBER_OF_CAPABILITIES;
}
/**
 * Returns the listening IP address.
 *
 * @return the advertised IP address (not necessarily the connected address)
 */
public String getIp() {
    return ip;
}
/**
 * Returns the listening port number.
 *
 * @return the advertised port (not necessarily the connected port)
 */
public int getPort() {
    return port;
}
/**
 * Returns the network version.
 *
 * @return the peer's network protocol version
 */
public short getNetworkVersion() {
    return networkVersion;
}
/**
 * Returns the client id.
 *
 * @return the client software identifier string
 */
public String getClientId() {
return clientId;
}
/**
 * Returns the peerId.
 *
 * @return the unique peer identifier
 */
public String getPeerId() {
return peerId;
}
/**
 * Returns the latestBlockNumber.
 *
 * @return the number of the latest block this peer claims to have
 */
public long getLatestBlockNumber() {
return latestBlockNumber;
}
/**
 * Sets the latestBlockNumber.
 *
 * @param number the peer's latest known block number
 */
public void setLatestBlockNumber(long number) {
this.latestBlockNumber = number;
}
/**
 * Returns peer latency.
 *
 * @return the last recorded latency value; not persisted by {@link #toBytes()}
 */
public long getLatency() {
return latency;
}
/**
 * Sets peer latency.
 *
 * @param latency the measured latency value
 */
public void setLatency(long latency) {
this.latency = latency;
}
/**
 * Getter for property 'capabilities'.
 *
 * @return the set of capabilities this peer supports
 */
public CapabilitySet getCapabilities() {
return capabilities;
}
/**
 * Serializes this peer into a byte array.
 *
 * The field order here must stay in sync with {@link #fromBytes(byte[])}.
 * The transient {@code latency} field is deliberately not written.
 *
 * @return the encoded bytes
 */
public byte[] toBytes() {
    SimpleEncoder encoder = new SimpleEncoder();
    encoder.writeString(ip);
    encoder.writeInt(port);
    encoder.writeShort(networkVersion);
    encoder.writeString(clientId);
    encoder.writeString(peerId);
    encoder.writeLong(latestBlockNumber);
    // capabilities are written as a size-prefixed list of strings
    encoder.writeInt(capabilities.size());
    for (String capability : capabilities.toList()) {
        encoder.writeString(capability);
    }
    return encoder.toBytes();
}
/**
 * Parses a peer from a byte array produced by {@link #toBytes()}.
 *
 * @param bytes the encoded bytes
 * @return the decoded Peer instance
 */
public static Peer fromBytes(byte[] bytes) {
    SimpleDecoder decoder = new SimpleDecoder(bytes);
    String ip = decoder.readString();
    int port = decoder.readInt();
    short networkVersion = decoder.readShort();
    String clientId = decoder.readString();
    String peerId = decoder.readString();
    long latestBlockNumber = decoder.readLong();
    // cap the declared count to guard against oversized capability lists
    final int count = Math.min(decoder.readInt(), MAX_NUMBER_OF_CAPABILITIES);
    String[] capabilityList = new String[count];
    for (int i = 0; i < count; i++) {
        capabilityList[i] = decoder.readString();
    }
    return new Peer(ip, port, networkVersion, clientId, peerId, latestBlockNumber,
            CapabilitySet.of(capabilityList));
}
/** {@inheritDoc} */
@Override
public String toString() {
    // e.g. "0123...abcd@10.0.0.1:5161"
    return String.format("%s@%s:%d", getPeerId(), ip, port);
}
}<file_sep><project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.semux</groupId>
<artifactId>semux</artifactId>
<version>1.0.2</version>
<packaging>jar</packaging>
<description>Semux is an experimental high-performance blockchain platform that powers decentralized applications.</description>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<dist.phase>install</dist.phase>
<dist.base>${project.basedir}/dist</dist.base>
<dist.windowsExecutableVersion>1.0.2.0</dist.windowsExecutableVersion>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
</properties>
<organization>
<name>Semux Foundation</name>
<url>https://www.semux.org</url>
</organization>
<licenses>
<license>
<name>MIT License</name>
<url>https://opensource.org/licenses/MIT</url>
<distribution>repo</distribution>
</license>
</licenses>
<issueManagement>
<system>GitHub</system>
<url>https://github.com/semuxproject/semux/issues</url>
</issueManagement>
<repositories>
<repository>
<snapshots>
<enabled>false</enabled>
</snapshots>
<id>central</id>
<name>bintray</name>
<url>https://jcenter.bintray.com</url>
</repository>
</repositories>
<pluginRepositories>
<pluginRepository>
<snapshots>
<enabled>false</enabled>
</snapshots>
<id>central</id>
<name>bintray-plugins</name>
<url>https://jcenter.bintray.com</url>
</pluginRepository>
</pluginRepositories>
<build>
<plugins>
<!-- compile -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.7.0</version>
</plugin>
<!-- build swagger document -->
<plugin>
<groupId>com.github.kongchen</groupId>
<artifactId>swagger-maven-plugin</artifactId>
<version>3.1.6</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>generate</goal>
</goals>
</execution>
</executions>
<configuration>
<apiSources>
<apiSource>
<info>
<title>Semux</title>
<description>${project.description}</description>
<version>${project.version}</version>
<contact>
<name>${project.organization.name}</name>
<url>${project.organization.url}</url>
</contact>
<license>
<name>MIT License</name>
<url>https://opensource.org/licenses/mit-license.php</url>
</license>
</info>
<securityDefinitions>
<securityDefinition>
<name>basicAuth</name>
<type>basic</type>
</securityDefinition>
</securityDefinitions>
<schemes>
<scheme>http</scheme>
</schemes>
<locations>
<location>org.semux.api.SemuxApi</location>
</locations>
<swaggerDirectory>target/swagger-ui</swaggerDirectory>
</apiSource>
</apiSources>
</configuration>
<dependencies>
<dependency>
<groupId>javax.xml.bind</groupId>
<artifactId>jaxb-api</artifactId>
<version>2.3.0</version>
</dependency>
</dependencies>
</plugin>
<!-- build fat jar -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>3.0.2</version>
<configuration>
<archive>
<index>true</index>
<manifestEntries>
<Main-Class>org.semux.wrapper.Wrapper</Main-Class>
<Implementation-Version>${git.commit.id.abbrev}</Implementation-Version>
<X-Compile-Source-JDK>${maven.compiler.source}</X-Compile-Source-JDK>
<X-Compile-Target-JDK>${maven.compiler.target}</X-Compile-Target-JDK>
</manifestEntries>
</archive>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>3.1.0</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
</execution>
</executions>
<configuration>
<shadedArtifactAttached>true</shadedArtifactAttached>
<shadedClassifierName>shaded</shadedClassifierName>
<artifactSet>
<excludes>
<exclude>junit:*</exclude>
<exclude>org.powermock:*</exclude>
<exclude>com.github.stefanbirkner:system-rules</exclude>
<exclude>org.assertj:*</exclude>
<exclude>org.awaitility:*</exclude>
<exclude>org.easytesting:*</exclude>
<exclude>net.bytebuddy:*</exclude>
<exclude>org.hamcrest:*</exclude>
<exclude>org.mockito:*</exclude>
</excludes>
</artifactSet>
<transformers>
<transformer implementation="org.apache.maven.plugins.shade.resource.DontIncludeResourceTransformer">
<resource>.SF</resource>
</transformer>
<transformer implementation="org.apache.maven.plugins.shade.resource.IncludeResourceTransformer">
<resource>LICENSE</resource>
</transformer>
<transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
<manifestEntries>
<Main-Class>org.semux.wrapper.Wrapper</Main-Class>
<Implementation-Version>${git.commit.id.abbrev}</Implementation-Version>
<X-Compile-Source-JDK>${maven.compiler.source}</X-Compile-Source-JDK>
<X-Compile-Target-JDK>${maven.compiler.target}</X-Compile-Target-JDK>
</manifestEntries>
</transformer>
</transformers>
</configuration>
</plugin>
<plugin>
<groupId>pl.project13.maven</groupId>
<artifactId>git-commit-id-plugin</artifactId>
<version>2.2.4</version>
<executions>
<execution>
<id>get-the-git-infos</id>
<goals>
<goal>revision</goal>
</goals>
</execution>
<execution>
<id>validate-the-git-infos</id>
<goals>
<goal>validateRevision</goal>
</goals>
<phase>package</phase>
</execution>
</executions>
<configuration>
<abbrevLength>7</abbrevLength>
<verbose>true</verbose>
<skipPoms>false</skipPoms>
</configuration>
</plugin>
<!-- build windows executable (currently GUI mode only) -->
<plugin>
<groupId>com.akathist.maven.plugins.launch4j</groupId>
<artifactId>launch4j-maven-plugin</artifactId>
<version>1.7.21</version>
<executions>
<execution>
<id>semux</id>
<phase>package</phase>
<goals>
<goal>launch4j</goal>
</goals>
<configuration>
<headerType>gui</headerType>
<jar>${project.build.directory}/${project.artifactId}-${project.version}-shaded.jar</jar>
<outfile>${project.build.directory}/semux.exe</outfile>
<downloadUrl>http://java.com/download</downloadUrl>
<classPath>
<mainClass>org.semux.gui.SemuxGui</mainClass>
<preCp>anything</preCp>
</classPath>
<icon>src/main/resources/org/semux/gui/logo.ico</icon>
<splash>
<!-- this file must be a 24-bit BMP -->
<file>misc/design/splash.bmp</file>
<waitForWindow>true</waitForWindow>
<timeout>600</timeout>
<timeoutErr>true</timeoutErr>
</splash>
<jre>
<minVersion>1.8.0</minVersion>
<jdkPreference>preferJre</jdkPreference>
<maxHeapPercent>80</maxHeapPercent>
</jre>
<versionInfo>
<fileVersion>${dist.windowsExecutableVersion}</fileVersion>
<fileDescription>${project.description}</fileDescription>
<txtFileVersion>${project.version}</txtFileVersion>
<copyright>MIT</copyright>
<productVersion>${dist.windowsExecutableVersion}</productVersion>
<txtProductVersion>${project.version}</txtProductVersion>
<productName>semux</productName>
<internalName>semux</internalName>
<originalFilename>semux.exe</originalFilename>
</versionInfo>
<singleInstance>
<mutexName>semux</mutexName>
</singleInstance>
</configuration>
</execution>
</executions>
</plugin>
<!-- build macos executable -->
<plugin>
<groupId>sh.tak.appbundler</groupId>
<artifactId>appbundle-maven-plugin</artifactId>
<version>1.2.0</version>
<configuration>
<mainClass>org.semux.wrapper.Wrapper</mainClass>
<workingDirectory>$APP_ROOT/..</workingDirectory>
<!-- <generateDiskImageFile>true</generateDiskImageFile> -->
<jvmVersion>1.8</jvmVersion>
<version>${project.version}-${git.commit.id.abbrev}</version>
<iconFile>logo.icns</iconFile>
<additionalResources>
<additionalResource>
<includes>
<include>config/**</include>
<include>LICENSE*</include>
</includes>
<directory>${project.basedir}</directory>
</additionalResource>
</additionalResources>
</configuration>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>bundle</goal>
</goals>
</execution>
</executions>
</plugin>
<!-- prepare distribution -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<version>1.8</version>
<executions>
<execution>
<id>package</id>
<phase>generate-resources</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<echo file="${project.build.outputDirectory}/VERSION">${git.commit.id.abbrev}</echo>
</target>
</configuration>
</execution>
<execution>
<id>dist</id>
<phase>${dist.phase}</phase>
<configuration>
<target>
<!-- windows build -->
<copy todir="${dist.base}/windows">
<fileset dir="${project.basedir}">
<include name="config/**" />
<include name="LICENSE*" />
</fileset>
<fileset dir="${project.basedir}/target">
<include name="semux.exe" />
</fileset>
</copy>
<copy file="${project.basedir}/misc/launch4j/semux.l4j.ini" tofile="${dist.base}/windows/semux.l4j.ini" />
<fixcrlf srcdir="${dist.base}/windows" includes="**/*.ini" eol="dos" eof="asis" />
<fixcrlf srcdir="${dist.base}/windows" includes="config/*" eol="dos" eof="asis" />
<fixcrlf srcdir="${dist.base}/windows" includes="LICENSE*" eol="dos" eof="asis" />
<!-- linux build (executable jar) -->
<copy todir="${dist.base}/linux">
<fileset dir="${project.basedir}">
<include name="config/**" />
<include name="LICENSE*" />
</fileset>
</copy>
<copy file="${project.basedir}/src/main/resources/org/semux/gui/splash.png" todir="${dist.base}/linux/resources" />
<copy file="${project.basedir}/target/semux-${project.version}-shaded.jar" tofile="${dist.base}/linux/semux.jar" />
<copy file="${project.basedir}/scripts/semux-cli.sh" tofile="${dist.base}/linux/semux-cli.sh" />
<copy file="${project.basedir}/scripts/semux-gui.sh" tofile="${dist.base}/linux/semux-gui.sh" />
<chmod file="${dist.base}/linux/semux.jar" perm="755" />
<chmod file="${dist.base}/linux/semux*.sh" perm="755" />
<!-- macos build -->
<!-- use fat jar because .app bundle is not production ready -->
<copy todir="${dist.base}/macos">
<fileset dir="${dist.base}/linux"></fileset>
</copy>
<chmod file="${dist.base}/macos/semux.jar" perm="755" />
<chmod file="${dist.base}/macos/semux*.sh" perm="755" />
</target>
</configuration>
<goals>
<goal>run</goal>
</goals>
</execution>
</executions>
</plugin>
<!-- clean -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-clean-plugin</artifactId>
<version>3.0.0</version>
<configuration>
<filesets>
<fileset>
<directory>${dist.base}</directory>
</fileset>
</filesets>
</configuration>
</plugin>
<!-- code coverage -->
<plugin>
<groupId>org.eluder.coveralls</groupId>
<artifactId>coveralls-maven-plugin</artifactId>
<version>4.3.0</version>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>cobertura-maven-plugin</artifactId>
<version>2.7</version>
<configuration>
<format>xml</format>
<maxmem>256m</maxmem>
<aggregate>true</aggregate>
<quiet>true</quiet>
<instrumentation>
<ignoreTrivial>true</ignoreTrivial>
</instrumentation>
<check />
</configuration>
</plugin>
<!-- license header check -->
<plugin>
<groupId>com.mycila</groupId>
<artifactId>license-maven-plugin</artifactId>
<version>3.0</version>
<configuration>
<header>misc/license_header.txt</header>
<encoding>UTF-8</encoding>
<includes>
<include>src/**/*.java</include>
<include>test/**/*.java</include>
</includes>
</configuration>
</plugin>
<!-- eclipse formatter/validator -->
<plugin>
<groupId>net.revelc.code.formatter</groupId>
<artifactId>formatter-maven-plugin</artifactId>
<version>2.7.0</version>
<configuration>
<encoding>UTF-8</encoding>
<lineEnding>LF</lineEnding>
<configFile>${project.basedir}/misc/formatter_eclipse.xml</configFile>
<directories>
<directory>${project.build.sourceDirectory}</directory>
<directory>${project.build.testSourceDirectory}</directory>
</directories>
</configuration>
</plugin>
<!-- FindBugs -->
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>findbugs-maven-plugin</artifactId>
<version>3.0.5</version>
<configuration>
<effort>Max</effort>
<!-- FIXME: lower the FindBugs threshold to Medium -->
<threshold>High</threshold>
<xmlOutput>true</xmlOutput>
<excludeFilterFile>${project.basedir}/findbugs-exclude.xml</excludeFilterFile>
</configuration>
</plugin>
<!-- testing -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.20.1</version>
<configuration>
<!-- do not reuse JVM instances -->
<forkCount>1</forkCount>
<reuseForks>false</reuseForks>
<!-- allow flaky tests -->
<rerunFailingTestsCount>2</rerunFailingTestsCount>
<argLine>-Xmx1024m</argLine>
</configuration>
</plugin>
<!-- verify the dependency chain -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-enforcer-plugin</artifactId>
<version>3.0.0-M1</version>
<executions>
<execution>
<id>enforce</id>
<phase>validate</phase>
<goals>
<goal>enforce</goal>
</goals>
<configuration>
<skip>false</skip>
<rules>
<DependencyConvergence />
<digestRule implementation="uk.co.froot.maven.enforcer.DigestRule">
<!-- Create a snapshot to build the list of URNs below -->
<buildSnapshot>false</buildSnapshot>
<!-- List of required hashes -->
<!-- Format is URN of groupId:artifactId:version:type:classifier:scope:hash -->
<!-- classifier is "null" if not present -->
<urns>
<urn>com.akathist.maven.plugins.launch4j:launch4j-maven-plugin:1.7.21:maven-plugin:null:runtime:58402e775916bc8824e257c8d87ad6c8064663a5</urn>
<urn>com.fasterxml.jackson.core:jackson-annotations:2.9.0:jar:null:compile:07c10d545325e3a6e72e06381afe469fd40eb701</urn>
<urn>com.fasterxml.jackson.core:jackson-core:2.9.4:jar:null:compile:a9a71ec1aa37da47db168fede9a4a5fb5e374320</urn>
<urn>com.fasterxml.jackson.core:jackson-databind:2.9.4:jar:null:compile:498bbc3b94f566982c7f7c6d4d303fce365529be</urn>
<urn>com.github.ben-manes.caffeine:caffeine:2.6.2:jar:null:compile:c8fd8817f9d2b2ced82f8968e0dd943aab557de0</urn>
<urn>com.github.kongchen:swagger-maven-plugin:3.1.6:maven-plugin:null:runtime:4c733e81674312c7794a4180095c9053baf900d5</urn>
<urn>com.github.oshi:oshi-core:3.4.4:jar:null:compile:f1aa8e053d26f49b909a845745018c57fe9d7a74</urn>
<urn>com.github.stefanbirkner:system-rules:1.17.1:jar:null:test:3642fe208063ad538ec6a2fca141d13e15f2b1f2</urn>
<urn>com.github.zafarkhaja:java-semver:0.9.0:jar:null:compile:59a83ca73c72a5e25b3f0b1bb305230a11000329</urn>
<urn>com.google.zxing:core:3.3.2:jar:null:compile:f47e3fc99fb2755b8a354d60efa021a0b88f4180</urn>
<urn>com.mycila:license-maven-plugin:3.0:maven-plugin:null:runtime:6e6175f847574fd15c644a69b540e279ea5d173f</urn>
<urn>commons-beanutils:commons-beanutils:1.9.2:jar:null:compile:7a87d845ad3a155297e8f67d9008f4c1e5656b71</urn>
<urn>commons-cli:commons-cli:1.4:jar:null:compile:c51c00206bb913cd8612b24abd9fa98ae89719b1</urn>
<urn>commons-digester:commons-digester:1.8.1:jar:null:compile:3dec9b9c7ea9342d4dbe8c38560080d85b44a015</urn>
<urn>commons-io:commons-io:2.6:jar:null:test:815893df5f31da2ece4040fe0a12fd44b577afaf</urn>
<urn>commons-logging:commons-logging:1.2:jar:null:compile:4bfc12adfe4842bf07b657f0369c4cb522955686</urn>
<urn>commons-validator:commons-validator:1.6:jar:null:compile:e989d1e87cdd60575df0765ed5bac65c905d7908</urn>
<urn>de.erichseifert.vectorgraphics2d:VectorGraphics2D:0.11:jar:null:test:f3bf8fe05b7997e03941bf77428598137500c300</urn>
<urn>io.netty:netty-all:4.1.22.Final:jar:null:compile:c1cea5d30025e4d584d2b287d177c31aea4ae629</urn>
<urn>io.swagger:swagger-annotations:1.5.18:jar:null:compile:f386aa7dc018534e6e05c40fff292e6cd9b9d8f8</urn>
<urn>javax.ws.rs:javax.ws.rs-api:2.1:jar:null:compile:426a0862406536e690c7caa8bb6ed32191986fac</urn>
<urn>junit:junit:4.12:jar:null:test:2973d150c0dc1fefe998f834810d68f278ea58ec</urn>
<urn>net.bytebuddy:byte-buddy-agent:1.7.10:jar:null:test:9b092a8fb9112b89f6cb23fc929cfb931b14007b</urn>
<urn>net.bytebuddy:byte-buddy:1.7.10:jar:null:test:3c8ec287c0a5bd04b1e3f11723bc46a50346602b</urn>
<urn>net.i2p.crypto:eddsa:0.2.0:jar:null:compile:0856a92559c4daf744cb27c93cd8b7eb1f8c4780</urn>
<urn>net.java.dev.jna:jna-platform:4.5.0:jar:null:compile:00ab163522ed76eb01c8c9a750dedacb134fc8c0</urn>
<urn>net.java.dev.jna:jna:4.5.0:jar:null:compile:55b548d3195efc5280bf1c3f17b49659c54dee40</urn>
<urn>net.revelc.code.formatter:formatter-maven-plugin:2.7.0:maven-plugin:null:runtime:001126378253588ea5a042d41714432033ca17a3</urn>
<urn>org.apache.commons:commons-collections4:4.1:jar:null:compile:a4cf4688fe1c7e3a63aa636cc96d013af537768e</urn>
<urn>org.apache.commons:commons-compress:1.9:jar:null:test:cc18955ff1e36d5abd39a14bfe82b19154330a34</urn>
<urn>org.apache.commons:commons-lang3:3.7:jar:null:compile:557edd918fd41f9260963583ebf5a61a43a6b423</urn>
<urn>org.apache.logging.log4j:log4j-api:2.10.0:jar:null:compile:fec5797a55b786184a537abd39c3fa1449d752d6</urn>
<urn>org.apache.logging.log4j:log4j-core:2.10.0:jar:null:compile:c90b597163cd28ab6d9687edd53db601b6ea75a1</urn>
<urn>org.apache.logging.log4j:log4j-slf4j-impl:2.10.0:jar:null:compile:8e4e0a30736175e31c7f714d95032c1734cfbdea</urn>
<urn>org.apache.maven.plugins:maven-antrun-plugin:1.8:maven-plugin:null:runtime:0d02c0af622aa6a0c86e81c519299e888e0a32a3</urn>
<urn>org.apache.maven.plugins:maven-clean-plugin:3.0.0:maven-plugin:null:runtime:6653cb054e6b81705e383fbcced26c92802e40ae</urn>
<urn>org.apache.maven.plugins:maven-compiler-plugin:3.7.0:maven-plugin:null:runtime:a3229c9aae47047e3a177ecfd4afa10fb5512d4e</urn>
<urn>org.apache.maven.plugins:maven-deploy-plugin:2.7:maven-plugin:null:runtime:6dadfb75679ca010b41286794f737088ebfe12fd</urn>
<urn>org.apache.maven.plugins:maven-enforcer-plugin:3.0.0-M1:maven-plugin:null:runtime:02b46cf923a7f5c75ffae61cedff103e0def3560</urn>
<urn>org.apache.maven.plugins:maven-install-plugin:2.4:maven-plugin:null:runtime:9d1316166fe4c313f56276935e08df11f45267c2</urn>
<urn>org.apache.maven.plugins:maven-jar-plugin:3.0.2:maven-plugin:null:runtime:5518cc6a2ed1b1ec52419fa0e18f7e42b6279cb9</urn>
<urn>org.apache.maven.plugins:maven-resources-plugin:2.3:maven-plugin:null:runtime:648f725a387d5b38c86d5837b8c2cf456f8d93c7</urn>
<urn>org.apache.maven.plugins:maven-shade-plugin:3.1.0:maven-plugin:null:runtime:c08cd5163a49e31f6e28d6f8bb153013a2a2de79</urn>
<urn>org.apache.maven.plugins:maven-site-plugin:3.3:maven-plugin:null:runtime:77ba1752b1ac4c4339d6f11554800960a56a4ae1</urn>
<urn>org.apache.maven.plugins:maven-surefire-plugin:2.20.1:maven-plugin:null:runtime:5f3f8f23a8c3525e64b06310b54a2f080690eb59</urn>
<urn>org.assertj:assertj-core:3.8.0:jar:null:test:b209d90ff2e279bee3e02547ee7b11349c52d0e3</urn>
<urn>org.assertj:assertj-swing-junit:3.8.0:jar:null:test:51a93135b0edac8f92cd67bceea6efa2e2dfa870</urn>
<urn>org.assertj:assertj-swing:3.8.0:jar:null:test:43e985f95237bf9c5fbfa21230f30cc5e859a4fb</urn>
<urn>org.awaitility:awaitility:3.0.0:jar:null:test:7e6b79f2b1e57eaa967b229791f31ca55aea668a</urn>
<urn>org.bitlet:weupnp:0.1.4:jar:null:compile:b99cd791ede89b7c17426e6c51a0f171dc925def</urn>
<urn>org.bouncycastle:bcprov-jdk15on:1.59:jar:null:compile:2507204241ab450456bdb8e8c0a8f986e418bd99</urn>
<urn>org.codehaus.mojo:cobertura-maven-plugin:2.7:maven-plugin:null:runtime:0d49d827220fc4282babb85fb3b9970ffdb99aff</urn>
<urn>org.codehaus.mojo:findbugs-maven-plugin:3.0.5:maven-plugin:null:runtime:b3c2f7fcf51637697fe7da4d38ad795e54682355</urn>
<urn>org.easytesting:fest-reflect:1.4.1:jar:null:test:2b92d5275e92a49e16c7ce6bd7e46b9080db0530</urn>
<urn>org.easytesting:fest-util:1.2.5:jar:null:test:c4a8d7305b23b8d043be12c979813b096df11f44</urn>
<urn>org.eluder.coveralls:coveralls-maven-plugin:4.3.0:maven-plugin:null:runtime:2f9f6bdef5dd1d863730a134a8f1280337440c46</urn>
<urn>org.ethereum:leveldbjni-all:1.18.3:jar:null:compile:189e46b64f39a5f4f6de2cbdf20f42061b10d961</urn>
<urn>org.hamcrest:hamcrest-core:1.3:jar:null:test:42a25dc3219429f0e5d060061f71acb49bf010a0</urn>
<urn>org.hamcrest:hamcrest-library:1.3:jar:null:test:4785a3c21320980282f9f33d0d1264a69040538f</urn>
<urn>org.javassist:javassist:3.21.0-GA:jar:null:test:598244f595db5c5fb713731eddbb1c91a58d959b</urn>
<urn>org.knowm.xchart:xchart:3.5.0:jar:null:test:d0ec5cff2450cf013c4289d5c7c730c245a701ba</urn>
<urn>org.mockito:mockito-core:2.8.9:jar:null:test:1afb35b2d77d40567756c379e54c18da3574a96e</urn>
<urn>org.objenesis:objenesis:2.6:jar:null:test:639033469776fd37c08358c6b92a4761feb2af4b</urn>
<urn>org.powermock:powermock-api-mockito-common:1.7.3:jar:null:test:54c112a921efad764eef571a1cf5b465d110c7df</urn>
<urn>org.powermock:powermock-api-mockito2:1.7.3:jar:null:test:0d505af2c1dcf0aa357bf66d5c206d7b6e512e42</urn>
<urn>org.powermock:powermock-api-support:1.7.3:jar:null:test:8f5b46fa56773007430bd0347af9fe72f8acb4c4</urn>
<urn>org.powermock:powermock-core:1.7.3:jar:null:test:73cd1fff8c9685be1ff2f369b561546e7549e003</urn>
<urn>org.powermock:powermock-module-junit4-common:1.7.3:jar:null:test:7287dd56b2b907a15aa2b6a0f1ddb2dd52cf388f</urn>
<urn>org.powermock:powermock-module-junit4:1.7.3:jar:null:test:f25cd17a4d7acba1a591beaecd99fad844875cc6</urn>
<urn>org.powermock:powermock-reflect:1.7.3:jar:null:test:f3d3c696a9977094c796dc8ffd174f828f3410e8</urn>
<urn>org.rauschig:jarchivelib:0.7.1:jar:null:test:47a77274cc38cfe6c04bc2afa3ece808742dc42b</urn>
<urn>org.slf4j:slf4j-api:1.7.25:jar:null:compile:da76ca59f6a57ee3102f8f9bd9cee742973efa8a</urn>
<urn>org.threeten:threetenbp:1.3.6:jar:null:compile:89dcc04a7e028c3c963413a71f950703cf51f057</urn>
<urn>org.xerial.snappy:snappy-java:1.1.7.1:jar:null:compile:d5190b41f3de61e3b83d692322d58630252bc8c3</urn>
<urn>pl.project13.maven:git-commit-id-plugin:2.2.4:maven-plugin:null:runtime:43a0dc1d02821c51847d587ecc4a6943086b884c</urn>
<urn>sh.tak.appbundler:appbundle-maven-plugin:1.2.0:maven-plugin:null:runtime:75a9e8c2adc7203b9b443b4547570da25c619f0d</urn>
</urns>
</digestRule>
</rules>
</configuration>
</execution>
</executions>
<dependencies>
<dependency>
<groupId>uk.co.froot.maven.enforcer</groupId>
<artifactId>digest-enforcer-rules</artifactId>
<version>0.0.1</version>
</dependency>
</dependencies>
</plugin>
</plugins>
<pluginManagement>
<plugins>
<!--This plugin's configuration is used to store Eclipse m2e settings only. It has no influence on the Maven
build itself. -->
<plugin>
<groupId>org.eclipse.m2e</groupId>
<artifactId>lifecycle-mapping</artifactId>
<version>1.0.0</version>
<configuration>
<lifecycleMappingMetadata>
<pluginExecutions>
<pluginExecution>
<pluginExecutionFilter>
<groupId>
org.apache.maven.plugins
</groupId>
<artifactId>
maven-antrun-plugin
</artifactId>
<versionRange>
[1.8,)
</versionRange>
<goals>
<goal>run</goal>
</goals>
</pluginExecutionFilter>
<action>
<ignore></ignore>
</action>
</pluginExecution>
</pluginExecutions>
</lifecycleMappingMetadata>
</configuration>
</plugin>
</plugins>
</pluginManagement>
</build>
<dependencies>
<!-- swagger API -->
<dependency>
<groupId>javax.ws.rs</groupId>
<artifactId>javax.ws.rs-api</artifactId>
<version>2.1</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>io.swagger</groupId>
<artifactId>swagger-annotations</artifactId>
<version>1.5.18</version>
<scope>compile</scope>
</dependency>
<!-- Logging framework -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.7.25</version>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<version>2.10.0</version>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<version>2.10.0</version>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<version>2.10.0</version>
<exclusions>
<!-- NOTE: log4j requires a newer version of slf4j which is still in beta -->
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Bouncy Castle -->
<dependency>
<groupId>org.bouncycastle</groupId>
<artifactId>bcprov-jdk15on</artifactId>
<version>1.59</version>
</dependency>
<!-- LevelDB -->
<dependency>
<groupId>org.ethereum</groupId>
<artifactId>leveldbjni-all</artifactId>
<version>1.18.3</version>
</dependency>
<!-- Netty network framework -->
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-all</artifactId>
<version>4.1.22.Final</version>
</dependency>
<!-- Snappy Java -->
<dependency>
<groupId>org.xerial.snappy</groupId>
<artifactId>snappy-java</artifactId>
<version>1.1.7.1</version>
</dependency>
<!-- Oshi Core -->
<dependency>
<groupId>com.github.oshi</groupId>
<artifactId>oshi-core</artifactId>
<version>3.4.4</version>
</dependency>
<!-- Apache commons -->
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>3.7</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-collections4</artifactId>
<version>4.1</version>
</dependency>
<dependency>
<groupId>commons-validator</groupId>
<artifactId>commons-validator</artifactId>
<version>1.6</version>
<exclusions>
<exclusion>
<groupId>commons-collections</groupId>
<artifactId>commons-collections</artifactId>
</exclusion>
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
<version>1.2</version>
</dependency>
<dependency>
<groupId>commons-cli</groupId>
<artifactId>commons-cli</artifactId>
<version>1.4</version>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>2.6</version>
<scope>test</scope>
</dependency>
<!-- JSON -->
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<version>2.9.4</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.9.4</version>
</dependency>
<!-- ED25519 -->
<dependency>
<groupId>net.i2p.crypto</groupId>
<artifactId>eddsa</artifactId>
<version>0.2.0</version>
</dependency>
<!-- zxing QR -->
<dependency>
<groupId>com.google.zxing</groupId>
<artifactId>core</artifactId>
<version>3.3.2</version>
</dependency>
<!-- weupnp -->
<dependency>
<groupId>org.bitlet</groupId>
<artifactId>weupnp</artifactId>
<version>0.1.4</version>
</dependency>
<!-- semantic version -->
<dependency>
<groupId>com.github.zafarkhaja</groupId>
<artifactId>java-semver</artifactId>
<version>0.9.0</version>
</dependency>
<!-- Caffeine Caching Library -->
<dependency>
<groupId>com.github.ben-manes.caffeine</groupId>
<artifactId>caffeine</artifactId>
<version>2.6.2</version>
</dependency>
<!-- Testing Libraries -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.12</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.hamcrest</groupId>
<artifactId>hamcrest-core</artifactId>
<version>1.3</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.hamcrest</groupId>
<artifactId>hamcrest-library</artifactId>
<version>1.3</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.assertj</groupId>
<artifactId>assertj-core</artifactId>
<version>3.8.0</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.assertj</groupId>
<artifactId>assertj-swing</artifactId>
<version>3.8.0</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.assertj</groupId>
<artifactId>assertj-swing-junit</artifactId>
<version>3.8.0</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-module-junit4</artifactId>
<version>1.7.3</version>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.objenesis</groupId>
<artifactId>objenesis</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-api-mockito2</artifactId>
<version>1.7.3</version>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>net.bytebuddy</groupId>
<artifactId>byte-buddy</artifactId>
</exclusion>
<exclusion>
<groupId>net.bytebuddy</groupId>
<artifactId>byte-buddy-agent</artifactId>
</exclusion>
<exclusion>
<groupId>org.objenesis</groupId>
<artifactId>objenesis</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.github.stefanbirkner</groupId>
<artifactId>system-rules</artifactId>
<version>1.17.1</version>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>junit</groupId>
<artifactId>junit-dep</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.awaitility</groupId>
<artifactId>awaitility</artifactId>
<version>3.0.0</version>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.objenesis</groupId>
<artifactId>objenesis</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- newer versions to support java 9 -->
<dependency>
<groupId>net.bytebuddy</groupId>
<artifactId>byte-buddy</artifactId>
<version>1.7.10</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>net.bytebuddy</groupId>
<artifactId>byte-buddy-agent</artifactId>
<version>1.7.10</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.objenesis</groupId>
<artifactId>objenesis</artifactId>
<version>2.6</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.rauschig</groupId>
<artifactId>jarchivelib</artifactId>
<version>0.7.1</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.knowm.xchart</groupId>
<artifactId>xchart</artifactId>
<version>3.5.0</version>
<scope>test</scope>
</dependency>
</dependencies>
<profiles>
<profile>
<id>windows</id>
<activation>
<os>
<family>windows</family>
</os>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-failsafe-plugin</artifactId>
<version>2.20.1</version>
<configuration>
<groups>org.semux.windows.WindowsIntegrationTest</groups>
</configuration>
<executions>
<execution>
<id>integration-test</id>
<goals>
<goal>integration-test</goal>
</goals>
</execution>
<execution>
<id>verify</id>
<goals>
<goal>verify</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>
<file_sep>### Total supply
The max supply of semux is capped at 100,000,000 SEM.
| Purpose | Amount | Note |
|---------------|----------------|------------------------------------------------------------|
| Block Rewards | 75,000,000 SEM | Distributed to Semux validators |
| Foundation | 10,000,000 SEM | Development, marketing, promotion and bounties |
| Community     | 10,000,000 SEM | Alpha/beta/rc test, bitcointalk airdrop, BTC giveaway, etc. |
| Founder | 5,000,000 SEM | Founder and core developers of Semux |
### Block reward
New SEM coins are created each time a validator forges a new block.
```java
public static long getBlockReward(long number) {
if (number <= 25_000_000) {
return 3 * Unit.SEM;
} else {
return 0;
}
}
```
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.api;
import static org.semux.config.Constants.JSON_MIME;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import org.semux.api.response.GetBlockResponse;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
/**
* Additional console-only API
*
*/
public interface ConsoleApi {

    /**
     * Returns the block at the given height.
     *
     * @param blockNum
     *            the block number, passed as a string query parameter
     *            (parsing/validation happens in the implementation)
     * @return the API response wrapping a {@link GetBlockResponse}
     */
    @GET
    @Path("get_block_by_number")
    @ApiOperation(value = "Get block by block number", notes = "Returns a block.", response = GetBlockResponse.class)
    @Produces(JSON_MIME)
    ApiHandlerResponse getBlockByNumber(
            @ApiParam(value = "Block number", required = true) @QueryParam("number") String blockNum);
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.net;
import io.netty.buffer.ByteBuf;
/**
* Represent a frame in the Semux network. Numbers are signed and in big-endian.
*
* <ul>
* <li><code>FRAME := HEADER (16 bytes) + BODY (variable length)</code></li>
* <li><code>HEADER := VERSION + COMPRESS_TYPE + PACKET_TYPE + PACKET_ID + PACKET_SIZE + BODY_SIZE</code></li>
* <li><code>BODY := BINARY_DATA</code></li>
* </ul>
*/
public class Frame {

    /** Fixed length of a frame header, in bytes. */
    public static final int HEADER_SIZE = 16;

    /** Frame format version emitted by this node. */
    public static final short VERSION = 0;

    /** Body is stored uncompressed. */
    public static final byte COMPRESS_NONE = 0;
    /** Body is compressed with Snappy. */
    public static final byte COMPRESS_SNAPPY = 1;

    protected short version; // frame version, 2 bytes on the wire
    protected byte compressType; // compression algorithm, 1 byte
    protected byte packetType; // type of the enclosing packet, 1 byte
    protected int packetId; // id of the enclosing packet, 4 bytes
    protected int packetSize; // total size of the enclosing packet, 4 bytes
    protected int bodySize; // size of this frame's body, 4 bytes

    protected byte[] body;

    public Frame(short version, byte compressType, byte packetType, int packetId, int packetSize, int bodySize,
            byte[] body) {
        this.version = version;
        this.compressType = compressType;
        this.packetType = packetType;
        this.packetId = packetId;
        this.packetSize = packetSize;
        this.bodySize = bodySize;
        this.body = body;
    }

    public short getVersion() {
        return version;
    }

    public byte getCompressType() {
        return compressType;
    }

    public byte getPacketType() {
        return packetType;
    }

    public int getPacketId() {
        return packetId;
    }

    public int getPacketSize() {
        return packetSize;
    }

    public int getBodySize() {
        return bodySize;
    }

    /**
     * Returns the frame body; may be {@code null} (e.g. for a frame produced by
     * {@link #readHeader(ByteBuf)} before the body has been read).
     *
     * @return the body bytes, or {@code null}
     */
    public byte[] getBody() {
        return body;
    }

    /**
     * Sets the frame body.
     *
     * @param body
     *            the body bytes
     */
    public void setBody(byte[] body) {
        this.body = body;
    }

    /**
     * Tells whether the packet spans multiple frames, i.e. this frame's body
     * carries only part of the packet.
     *
     * @return {@code true} when the body size differs from the packet size
     */
    public boolean isChunked() {
        return packetSize != bodySize;
    }

    /**
     * Serializes the 16-byte frame header into the buffer; the write order must
     * mirror the read order in {@link #readHeader(ByteBuf)}.
     */
    public void writeHeader(ByteBuf buf) {
        buf.writeShort(getVersion());
        buf.writeByte(getCompressType());
        buf.writeByte(getPacketType());
        buf.writeInt(getPacketId());
        buf.writeInt(getPacketSize());
        buf.writeInt(getBodySize());
    }

    /**
     * Deserializes a frame header from the given buffer.
     *
     * @param in
     *            the buffer positioned at a frame header
     * @return a {@link Frame} with all header fields populated and a
     *         {@code null} body
     */
    public static Frame readHeader(ByteBuf in) {
        // read order must mirror writeHeader()
        return new Frame(in.readShort(), in.readByte(), in.readByte(), in.readInt(), in.readInt(), in.readInt(),
                null);
    }

    @Override
    public String toString() {
        return String.format("Frame [version=%d, compressType=%d, packetType=%d, packetId=%d, packetSize=%d, bodySize=%d]",
                version, compressType, packetType, packetId, packetSize, bodySize);
    }
}<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.gui;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.File;
import javax.swing.BoxLayout;
import javax.swing.ButtonGroup;
import javax.swing.GroupLayout;
import javax.swing.GroupLayout.Alignment;
import javax.swing.JButton;
import javax.swing.JFileChooser;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JPasswordField;
import javax.swing.JRadioButton;
import javax.swing.LayoutStyle.ComponentPlacement;
import javax.swing.WindowConstants;
import javax.swing.border.EmptyBorder;
import javax.swing.filechooser.FileNameExtensionFilter;
import org.semux.core.Wallet;
import org.semux.crypto.Key;
import org.semux.message.GuiMessages;
import org.semux.util.SystemUtil;
import org.semux.util.exception.UnreachableException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * First-run window: lets the user either create a fresh wallet account or
 * import accounts from a backup wallet file, protected by a password.
 */
public class WelcomeFrame extends JFrame implements ActionListener {

    private static final long serialVersionUID = 1L;

    private static final Logger logger = LoggerFactory.getLogger(WelcomeFrame.class);

    private JPasswordField txtPassword;
    private JPasswordField txtPasswordRepeat;
    private JLabel lblPasswordRepeat;
    private JRadioButton btnCreate;
    private JRadioButton btnRecover;

    // wallet being created/recovered; non-serializable, hence transient
    private transient Wallet wallet;
    // backup file to import from; null means "create new account" mode
    private transient File backupFile = null;
    // completion flag guarded by this frame's monitor (see join()/done())
    private transient boolean done = false;

    public WelcomeFrame(Wallet wallet) {
        this.wallet = wallet;

        // setup frame properties
        this.setTitle(GuiMessages.get("SemuxWallet"));
        this.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
        this.setIconImage(SwingUtil.loadImage("logo", 128, 128).getImage());
        this.setMinimumSize(new Dimension(600, 400));
        SwingUtil.alignFrameToMiddle(this, 600, 400);

        // create banner
        JLabel banner = new JLabel("");
        banner.setIcon(SwingUtil.loadImage("banner", 125, 160));

        // create description
        JLabel description = new JLabel(GuiMessages.get("WelcomeDescriptionHtml"));

        // create select button group (create-new vs. import-from-backup)
        Color color = new Color(220, 220, 220);
        JPanel panel = new JPanel();
        panel.setBorder(new EmptyBorder(8, 3, 8, 3));
        panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS));
        panel.setBackground(color);
        ButtonGroup buttonGroup = new ButtonGroup();

        // "create new account" option, selected by default
        btnCreate = new JRadioButton(GuiMessages.get("CreateNewAccount"));
        btnCreate.setName("btnCreate");
        btnCreate.setSelected(true);
        btnCreate.setBackground(color);
        btnCreate.setActionCommand(Action.CREATE_ACCOUNT.name());
        btnCreate.addActionListener(this);
        buttonGroup.add(btnCreate);
        panel.add(btnCreate);

        // "import from backup" option
        btnRecover = new JRadioButton(GuiMessages.get("ImportAccountsFromBackupFile"));
        btnRecover.setName("btnRecover");
        btnRecover.setBackground(color);
        btnRecover.setActionCommand(Action.RECOVER_ACCOUNTS.name());
        btnRecover.addActionListener(this);
        buttonGroup.add(btnRecover);
        panel.add(btnRecover);

        // create buttons
        JButton btnCancel = SwingUtil.createDefaultButton(GuiMessages.get("Cancel"), this, Action.CANCEL);
        btnCancel.setName("btnCancel");

        JButton btnNext = SwingUtil.createDefaultButton(GuiMessages.get("Next"), this, Action.OK);
        btnNext.setName("btnNext");
        btnNext.setSelected(true);

        // password field; pressing ENTER triggers the OK action
        JLabel lblPassword = new JLabel(GuiMessages.get("Password") + ":");
        txtPassword = new JPasswordField();
        txtPassword.setName("txtPassword");
        txtPassword.setActionCommand(Action.OK.name());
        txtPassword.addActionListener(this);

        // password confirmation field; hidden in recover mode (see selectRecover)
        lblPasswordRepeat = new JLabel(GuiMessages.get("RepeatPassword") + ":");
        txtPasswordRepeat = new JPasswordField();
        txtPasswordRepeat.setName("txtPasswordRepeat");
        txtPasswordRepeat.setActionCommand(Action.OK.name());
        txtPasswordRepeat.addActionListener(this);

        // @formatter:off
        GroupLayout groupLayout = new GroupLayout(this.getContentPane());
        groupLayout.setHorizontalGroup(
            groupLayout.createParallelGroup(Alignment.TRAILING)
                .addGroup(groupLayout.createSequentialGroup()
                    .addGroup(groupLayout.createParallelGroup(Alignment.TRAILING)
                        .addGroup(groupLayout.createSequentialGroup()
                            .addContainerGap()
                            .addComponent(btnCancel, GroupLayout.PREFERRED_SIZE, 92, GroupLayout.PREFERRED_SIZE)
                            .addGap(18)
                            .addComponent(btnNext, GroupLayout.PREFERRED_SIZE, 84, GroupLayout.PREFERRED_SIZE))
                        .addGroup(groupLayout.createSequentialGroup()
                            .addGap(21)
                            .addComponent(banner, GroupLayout.PREFERRED_SIZE, 140, GroupLayout.PREFERRED_SIZE)
                            .addGap(18)
                            .addGroup(groupLayout.createParallelGroup(Alignment.LEADING)
                                .addComponent(lblPasswordRepeat)
                                .addGroup(groupLayout.createParallelGroup(Alignment.LEADING)
                                    .addGroup(groupLayout.createParallelGroup(Alignment.LEADING)
                                        .addComponent(panel, GroupLayout.DEFAULT_SIZE, 490, Short.MAX_VALUE)
                                        .addComponent(lblPassword)
                                        .addComponent(description, GroupLayout.DEFAULT_SIZE, 490, Short.MAX_VALUE))
                                    .addGroup(groupLayout.createParallelGroup(Alignment.TRAILING, false)
                                        .addComponent(txtPasswordRepeat, Alignment.LEADING)
                                        .addComponent(txtPassword, Alignment.LEADING, GroupLayout.DEFAULT_SIZE, 242, Short.MAX_VALUE))))))
                    .addGap(32))
        );
        groupLayout.setVerticalGroup(
            groupLayout.createParallelGroup(Alignment.LEADING)
                .addGroup(groupLayout.createSequentialGroup()
                    .addGroup(groupLayout.createParallelGroup(Alignment.LEADING, false)
                        .addGroup(groupLayout.createSequentialGroup()
                            .addGap(10)
                            .addComponent(description)
                            .addGap(10)
                            .addComponent(panel, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)
                            .addGap(20)
                            .addComponent(lblPassword)
                            .addPreferredGap(ComponentPlacement.RELATED)
                            .addComponent(txtPassword, GroupLayout.PREFERRED_SIZE, 25, GroupLayout.PREFERRED_SIZE)
                            .addPreferredGap(ComponentPlacement.UNRELATED)
                            .addComponent(lblPasswordRepeat)
                            .addPreferredGap(ComponentPlacement.RELATED)
                            .addComponent(txtPasswordRepeat, GroupLayout.PREFERRED_SIZE, 25, GroupLayout.PREFERRED_SIZE))
                        .addGroup(groupLayout.createSequentialGroup()
                            .addGap(10)
                            .addComponent(banner, GroupLayout.PREFERRED_SIZE, 293, GroupLayout.PREFERRED_SIZE)))
                    .addPreferredGap(ComponentPlacement.RELATED, 84, Short.MAX_VALUE)
                    .addGroup(groupLayout.createParallelGroup(Alignment.BASELINE)
                        .addComponent(btnNext)
                        .addComponent(btnCancel))
                    .addGap(21))
        );
        // @formatter:on
        this.getContentPane().setLayout(groupLayout);
    }

    /**
     * Dispatches UI events by their action command.
     */
    @Override
    public void actionPerformed(ActionEvent e) {
        Action action = Action.valueOf(e.getActionCommand());
        switch (action) {
        case CREATE_ACCOUNT:
            createAccount();
            break;
        case RECOVER_ACCOUNTS:
            recoverAccounts();
            break;
        case OK:
            ok();
            break;
        case CANCEL:
            SystemUtil.exitAsync(SystemUtil.Code.OK);
            break;
        default:
            throw new UnreachableException();
        }
    }

    /**
     * Waits the welcome frame to be finished.
     */
    public void join() {
        synchronized (this) {
            while (!done) {
                try {
                    wait();
                } catch (InterruptedException e) {
                    // restore the interrupt flag, then shut down
                    Thread.currentThread().interrupt();
                    SystemUtil.exitAsync(SystemUtil.Code.OK);
                }
            }
        }
    }

    /**
     * Set the <code>done</code> flag to be true and notify all waiting threads.
     */
    protected void done() {
        synchronized (this) {
            done = true;
            notifyAll();
        }
    }

    /**
     * When the CREATE_ACCOUNT option is selected.
     */
    protected void createAccount() {
        selectCreate();
    }

    /**
     * When the RECOVER_ACCOUNTS option is selected: prompts for a backup wallet
     * file; falls back to create mode if the user cancels the chooser.
     */
    protected void recoverAccounts() {
        JFileChooser chooser = new JFileChooser();
        chooser.setFileSelectionMode(JFileChooser.FILES_ONLY);
        chooser.setFileFilter(new FileNameExtensionFilter(GuiMessages.get("WalletBinaryFormat"), "data"));

        int ret = chooser.showOpenDialog(this);
        if (ret == JFileChooser.APPROVE_OPTION) {
            selectRecover(chooser.getSelectedFile());
        } else {
            btnCreate.setSelected(true);
        }
    }

    /**
     * When the OK button is clicked: validates the password(s), then either
     * creates a new account or imports accounts from the selected backup file.
     * Fatal wallet-state problems terminate the application asynchronously.
     */
    protected void ok() {
        String password = new String(txtPassword.getPassword());
        String passwordRepeat = new String(txtPasswordRepeat.getPassword());
        // password confirmation only applies in create mode
        if (isCreate() && !password.equals(passwordRepeat)) {
            JOptionPane.showMessageDialog(this, GuiMessages.get("RepeatPasswordError"));
            return;
        }

        // paranoid check
        if (wallet.exists()) {
            logger.error("Wallet already exists!");
            SystemUtil.exitAsync(SystemUtil.Code.WALLET_ALREADY_EXISTS);
        } else if (wallet.isUnlocked()) {
            logger.error("Wallet already unlocked!");
            SystemUtil.exitAsync(SystemUtil.Code.WALLET_ALREADY_UNLOCKED);
        }

        if (isCreate()) {
            // create mode: unlock, add one fresh key and persist
            if (wallet.unlock(password)
                    && wallet.addAccount(new Key())
                    && wallet.flush()) {
                done();
            } else {
                JOptionPane.showMessageDialog(this, GuiMessages.get("WalletSaveFailed"));
                SystemUtil.exitAsync(SystemUtil.Code.FAILED_TO_WRITE_WALLET_FILE);
            }
        } else {
            // recover mode: unlock the backup with the same password, then merge
            Wallet w = new Wallet(backupFile);
            if (!w.unlock(password)) {
                JOptionPane.showMessageDialog(this, GuiMessages.get("UnlockFailed"));
            } else if (w.size() == 0) {
                JOptionPane.showMessageDialog(this, GuiMessages.get("NoAccountFound"));
            } else {
                if (wallet.unlock(password)
                        && wallet.addWallet(w) > 0
                        && wallet.flush()) {
                    done();
                } else {
                    JOptionPane.showMessageDialog(this, GuiMessages.get("WalletSaveFailed"));
                    SystemUtil.exitAsync(SystemUtil.Code.FAILED_TO_WRITE_WALLET_FILE);
                }
            }
        }
    }

    /**
     * Returns whether the frame is in create mode (no backup file selected).
     */
    protected boolean isCreate() {
        return backupFile == null;
    }

    /**
     * Switches the UI into create-new-account mode.
     */
    protected void selectCreate() {
        backupFile = null;

        txtPasswordRepeat.setVisible(true);
        lblPasswordRepeat.setVisible(true);
        btnCreate.setSelected(true);
    }

    /**
     * Switches the UI into recover-from-backup mode for the given file.
     *
     * @param file
     *            the backup wallet file; must not be null
     */
    protected void selectRecover(File file) {
        if (file == null) {
            throw new IllegalArgumentException("Selected file can't be null");
        }
        backupFile = file;

        // no confirmation needed when importing an existing wallet
        txtPasswordRepeat.setVisible(false);
        lblPasswordRepeat.setVisible(false);
        btnRecover.setSelected(true);
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.bench;
import static org.semux.core.Amount.Unit.NANO_SEM;
import java.util.ArrayList;
import java.util.List;
import org.semux.Network;
import org.semux.TestUtils;
import org.semux.config.Config;
import org.semux.config.Constants;
import org.semux.config.DevnetConfig;
import org.semux.core.Amount;
import org.semux.core.Block;
import org.semux.core.BlockHeader;
import org.semux.core.Blockchain;
import org.semux.core.BlockchainImpl;
import org.semux.core.Genesis;
import org.semux.core.Transaction;
import org.semux.core.TransactionResult;
import org.semux.core.TransactionType;
import org.semux.crypto.Key;
import org.semux.crypto.Key.Signature;
import org.semux.rules.TemporaryDatabaseRule;
import org.semux.util.Bytes;
import org.semux.util.MerkleUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Ad-hoc micro-benchmarks for block creation/validation, transaction
 * validation and block persistence. Results are written to the logger;
 * run via {@link #main(String[])}.
 */
public class BlockchainPerformance {

    private static final Logger logger = LoggerFactory.getLogger(BlockchainPerformance.class);

    private static Config config = new DevnetConfig(Constants.DEFAULT_DATA_DIR);
    private static Key key = new Key();

    /**
     * Builds and times a maximally-filled block: signed TRANSFER transactions
     * are appended until the configured max block transaction size is reached.
     *
     * @return the constructed block, for reuse by the validation benchmark
     */
    public static Block testBlockCreation() {
        long t1 = System.nanoTime();

        List<Transaction> txs = new ArrayList<>();
        List<TransactionResult> res = new ArrayList<>();
        int total = 0;
        // fill the block until the next transaction would exceed the size cap
        for (int i = 0;; i++) {
            Network network = config.network();
            TransactionType type = TransactionType.TRANSFER;
            byte[] to = Bytes.random(20);
            Amount value = NANO_SEM.of(1);
            Amount fee = config.minTransactionFee();
            long nonce = 1 + i;
            long timestamp = System.currentTimeMillis();
            byte[] data = Bytes.EMPTY_BYTES;
            Transaction tx = new Transaction(network, type, to, value, fee, nonce, timestamp, data).sign(key);

            if (total + tx.size() > config.maxBlockTransactionsSize()) {
                break;
            }
            txs.add(tx);
            res.add(new TransactionResult(true));
            total += tx.size();
        }

        // assemble the block header from the merkle roots of txs/results
        long number = 1;
        byte[] coinbase = key.toAddress();
        byte[] prevHash = Bytes.random(32);
        long timestamp = System.currentTimeMillis();
        byte[] transactionsRoot = MerkleUtil.computeTransactionsRoot(txs);
        byte[] resultsRoot = MerkleUtil.computeResultsRoot(res);
        byte[] stateRoot = Bytes.EMPTY_HASH;
        byte[] data = {};
        BlockHeader header = new BlockHeader(number, coinbase, prevHash, timestamp, transactionsRoot, resultsRoot,
                stateRoot, data);
        Block block = new Block(header, txs, res);

        // attach one dummy vote per validator to approximate real block size
        List<Signature> votes = new ArrayList<>();
        for (int i = 0; i < config.getNumberOfValidators(1000000L); i++) {
            votes.add(new Key().sign(Bytes.EMPTY_BYTES));
        }
        block.setView(1);
        block.setVotes(votes);

        long t2 = System.nanoTime();
        logger.info("block # of txs: {}", block.getTransactions().size());
        logger.info("block header size: {} B", block.toBytesHeader().length);
        logger.info("block transaction size: {} KB", block.toBytesTransactions().length / 1024);
        logger.info("block results size: {} KB", block.toBytesResults().length / 1024);
        logger.info("block votes size: {} KB", block.toBytesVotes().length / 1024);
        logger.info("block total size: {} KB", block.size() / 1024);
        logger.info("Perf_block_creation: {} ms", (t2 - t1) / 1_000_000);
        return block;
    }

    /**
     * Times header/transactions/results validation of the given block against
     * the devnet genesis block. Vote validation is intentionally excluded.
     *
     * @param block
     *            the block produced by {@link #testBlockCreation()}
     */
    public static void testBlockValidation(Block block) {
        Genesis gen = Genesis.load(Network.DEVNET);

        long t1 = System.nanoTime();
        Block.validateHeader(gen.getHeader(), block.getHeader());
        Block.validateTransactions(gen.getHeader(), block.getTransactions(), config.network());
        Block.validateResults(gen.getHeader(), block.getResults());
        // block votes validation skipped
        long t2 = System.nanoTime();
        logger.info("Perf_block_validation: {} ms", (t2 - t1) / 1_000_000);
    }

    /**
     * Times validation of a single signed TRANSFER transaction, averaged over
     * 1000 repetitions.
     */
    public static void testTransactionValidation() {
        // local key shadows the class-level one on purpose: a fresh signer per run
        Key key = new Key();

        Network network = config.network();
        TransactionType type = TransactionType.TRANSFER;
        byte[] to = Bytes.random(20);
        Amount value = NANO_SEM.of(1);
        Amount fee = config.minTransactionFee();
        long nonce = 1;
        long timestamp = System.currentTimeMillis();
        byte[] data = {};
        Transaction tx = new Transaction(network, type, to, value, fee, nonce, timestamp, data);
        tx.sign(key);

        int repeat = 1000;
        long t1 = System.nanoTime();
        for (int i = 0; i < repeat; i++) {
            tx.validate(network);
        }
        long t2 = System.nanoTime();
        logger.info("Perf_transaction_size: {} B", tx.toBytes().length);
        logger.info("Perf_transaction_validation: {} μs/time", (t2 - t1) / repeat / 1_000);
    }

    /**
     * Times persisting 10,000 empty blocks into a temporary database.
     *
     * @throws Throwable
     *             propagated from the temporary database rule setup/teardown
     */
    public static void testAddBlock() throws Throwable {
        final int repeat = 10000;

        // pre-build the blocks so only addBlock() is inside the timed section
        Block[] blocks = new Block[repeat];
        for (int i = 0; i < repeat; i++) {
            blocks[i] = TestUtils.createEmptyBlock(i);
        }

        TemporaryDatabaseRule temporaryDbRule = new TemporaryDatabaseRule();
        temporaryDbRule.before();
        Blockchain blockchain = new BlockchainImpl(config, temporaryDbRule);
        long t1 = System.currentTimeMillis();
        for (int i = 0; i < repeat; i++) {
            blockchain.addBlock(blocks[i]);
        }
        long t2 = System.currentTimeMillis();
        temporaryDbRule.after();

        logger.info("Perf_addBlock: {} ms / {} blocks", t2 - t1, repeat);
    }

    /**
     * Runs all benchmarks in sequence and exits.
     */
    public static void main(String[] args) throws Throwable {
        Block block = testBlockCreation();
        testBlockValidation(block);
        testTransactionValidation();
        testAddBlock();

        System.exit(0);
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.net.msg;
import org.semux.crypto.Hex;
import org.semux.net.msg.consensus.BlockHeaderMessage;
import org.semux.net.msg.consensus.BlockMessage;
import org.semux.net.msg.consensus.GetBlockHeaderMessage;
import org.semux.net.msg.consensus.GetBlockMessage;
import org.semux.net.msg.consensus.NewHeightMessage;
import org.semux.net.msg.consensus.NewViewMessage;
import org.semux.net.msg.consensus.ProposalMessage;
import org.semux.net.msg.consensus.VoteMessage;
import org.semux.net.msg.p2p.DisconnectMessage;
import org.semux.net.msg.p2p.GetNodesMessage;
import org.semux.net.msg.p2p.HelloMessage;
import org.semux.net.msg.p2p.NodesMessage;
import org.semux.net.msg.p2p.PingMessage;
import org.semux.net.msg.p2p.PongMessage;
import org.semux.net.msg.p2p.TransactionMessage;
import org.semux.net.msg.p2p.WorldMessage;
import org.semux.util.Bytes;
import org.semux.util.exception.UnreachableException;
/**
 * Decodes raw wire bytes into typed {@link Message} instances based on the
 * one-byte message code.
 */
public class MessageFactory {

    /**
     * Decode a raw message.
     *
     * @param code
     *            the one-byte message code received on the wire
     * @param encoded
     *            the encoded message payload
     * @return the decoded message
     * @throws MessageException
     *             if the message is undecodable (unknown code or malformed
     *             payload)
     */
    public Message create(byte code, byte[] encoded) throws MessageException {

        MessageCode c = MessageCode.of(code);
        if (c == null) {
            throw new MessageException("Invalid message code: " + Hex.encode0x(Bytes.of(code)));
        }

        try {
            switch (c) {
            case DISCONNECT:
                return new DisconnectMessage(encoded);
            case HELLO:
                return new HelloMessage(encoded);
            case WORLD:
                return new WorldMessage(encoded);
            case PING:
                return new PingMessage(encoded);
            case PONG:
                return new PongMessage(encoded);
            case GET_NODES:
                return new GetNodesMessage(encoded);
            case NODES:
                return new NodesMessage(encoded);
            case TRANSACTION:
                return new TransactionMessage(encoded);
            case GET_BLOCK:
                return new GetBlockMessage(encoded);
            case BLOCK:
                return new BlockMessage(encoded);
            case GET_BLOCK_HEADER:
                return new GetBlockHeaderMessage(encoded);
            case BLOCK_HEADER:
                return new BlockHeaderMessage(encoded);
            case BFT_NEW_HEIGHT:
                return new NewHeightMessage(encoded);
            case BFT_NEW_VIEW:
                return new NewViewMessage(encoded);
            case BFT_PROPOSAL:
                return new ProposalMessage(encoded);
            case BFT_VOTE:
                return new VoteMessage(encoded);
            default:
                // every MessageCode constant is handled above
                throw new UnreachableException();
            }
        } catch (Exception e) {
            // wrap any constructor failure (malformed payload) uniformly
            throw new MessageException("Failed to decode message", e);
        }
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.bench;
import java.io.File;
import org.semux.config.Constants;
import org.semux.db.LeveldbDatabase;
import org.semux.util.Bytes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Ad-hoc micro-benchmarks for raw LevelDB read/write latency. Results are
 * written to the logger; run via {@link #main(String[])}.
 */
public class DBPerformance {

    private static final Logger logger = LoggerFactory.getLogger(DBPerformance.class);

    /** Number of operations per benchmark run. */
    private static final int REPEAT = 100_000;

    /**
     * Opens (or creates) the benchmark database under the default data
     * directory.
     */
    private static LeveldbDatabase getTestDB() {
        return new LeveldbDatabase(new File(Constants.DEFAULT_DATA_DIR, "test"));
    }

    /**
     * Measures average write latency by inserting {@link #REPEAT} random
     * 256-byte key/value pairs.
     */
    public static void testWrite() {
        LeveldbDatabase db = getTestDB();
        try {
            long t1 = System.nanoTime();
            for (int i = 0; i < REPEAT; i++) {
                byte[] key = Bytes.random(256);
                byte[] value = Bytes.random(256);
                db.put(key, value);
            }
            long t2 = System.nanoTime();
            // parameterized logging, consistent with the other Perf_* benchmarks
            logger.info("Perf_db_write: {} μs/time", (t2 - t1) / 1_000 / REPEAT);
        } finally {
            db.close();
        }
    }

    /**
     * Measures average read latency by looking up {@link #REPEAT} random
     * 256-byte keys.
     *
     * NOTE(review): the keys are freshly random, so most lookups will not match
     * keys written by {@link #testWrite()}; this effectively benchmarks
     * negative lookups — confirm that is the intent.
     */
    public static void testRead() {
        LeveldbDatabase db = getTestDB();
        try {
            long t1 = System.nanoTime();
            for (int i = 0; i < REPEAT; i++) {
                byte[] key = Bytes.random(256);
                db.get(key);
            }
            long t2 = System.nanoTime();
            // parameterized logging, consistent with the other Perf_* benchmarks
            logger.info("Perf_db_read: {} μs/time", (t2 - t1) / 1_000 / REPEAT);
        } finally {
            db.close();
        }
    }

    /**
     * Runs both benchmarks, then removes the benchmark database.
     */
    public static void main(String[] args) {
        testWrite();
        testRead();

        LeveldbDatabase db = getTestDB();
        db.destroy();
    }
}
<file_sep># Change Log
## [v1.0.1](https://github.com/semuxproject/semux/tree/v1.0.1) (2018-03-06)
[Full Changelog](https://github.com/semuxproject/semux/compare/v1.0.0...v1.0.1)
**Fixed bugs:**
- A validator node might get stuck in the sync process
- Consensus: Don't sync when a validator is in FINALIZE state [\#655](https://github.com/semuxproject/semux/pull/655) ([semuxdev](https://github.com/semuxdev))
- Consensus: Fix unconditional wait of SemuxSync\#isRunning [\#626](https://github.com/semuxproject/semux/pull/626) ([cryptokat](https://github.com/cryptokat))
- API: Fix typos in API docs [\#666](https://github.com/semuxproject/semux/pull/666) ([orogvany](https://github.com/orogvany))
- GUI: Dispose address book dialog when the wallet GUI is locked [\#599](https://github.com/semuxproject/semux/pull/599) ([phash](https://github.com/phash))
- GUI: Import wallet imports addressbook too [\#576](https://github.com/semuxproject/semux/pull/576) ([orogvany](https://github.com/orogvany))
- GUI: Focus text field on right click [\#540](https://github.com/semuxproject/semux/pull/540) ([cryptokat](https://github.com/cryptokat))
- Net: Properly separate mainnet and testnet [\#568](https://github.com/semuxproject/semux/pull/568) ([cryptokat](https://github.com/cryptokat))
- CLI: Flush async loggers in `Launcher` class [\#539](https://github.com/semuxproject/semux/pull/539) ([cryptokat](https://github.com/cryptokat))
**Implemented enhancements:**
- Add Java 9 Support [\#640](https://github.com/semuxproject/semux/pull/640) ([semuxdev](https://github.com/semuxdev))
- GUI: Support customized based unit and show full digits by default [\#681](https://github.com/semuxproject/semux/pull/681) ([cryptokat](https://github.com/cryptokat))
- GUI: Validate address alias length [\#660](https://github.com/semuxproject/semux/pull/660) ([orogvany](https://github.com/orogvany))
- GUI: Clean up address label [\#630](https://github.com/semuxproject/semux/pull/630) ([semuxdev](https://github.com/semuxdev))
- GUI: Update to new logo [\#606](https://github.com/semuxproject/semux/pull/606) ([orogvany](https://github.com/orogvany))
- GUI: Render to highest precision [\#602](https://github.com/semuxproject/semux/pull/602) ([orogvany](https://github.com/orogvany))
- GUI: Add Mnemonic Keys to the UI [\#589](https://github.com/semuxproject/semux/pull/589) ([phash](https://github.com/phash))
- GUI: Added feedback for empty names on address book entries [\#575](https://github.com/semuxproject/semux/pull/575) ([phash](https://github.com/phash))
- GUI: Add address book edit dialog [\#574](https://github.com/semuxproject/semux/pull/574) ([phash](https://github.com/phash))
- GUI: Add InputDialog to Windows TaskBar [\#571](https://github.com/semuxproject/semux/pull/571) ([phash](https://github.com/phash))
- GUI: Provide detailed tooltip for Data input [\#570](https://github.com/semuxproject/semux/pull/570) ([phash](https://github.com/phash))
- GUI: Add prefix to address in generated QR Code [\#566](https://github.com/semuxproject/semux/pull/566) ([phash](https://github.com/phash))
- GUI: Add a Title to Rename Account Dialog [\#563](https://github.com/semuxproject/semux/pull/563) ([orogvany](https://github.com/orogvany))
- GUI: Add a Title to Import Dialog [\#562](https://github.com/semuxproject/semux/pull/562) ([orogvany](https://github.com/orogvany))
- GUI: Add Semux Logo to About Dialog [\#560](https://github.com/semuxproject/semux/pull/560) ([orogvany](https://github.com/orogvany))
- GUI: Add command console [\#472](https://github.com/semuxproject/semux/pull/472) ([orogvany](https://github.com/orogvany))
- Consensus: Optimize transaction validation [\#675](https://github.com/semuxproject/semux/pull/675) ([orogvany](https://github.com/orogvany))
- Config: Disallow default API username or password [\#689](https://github.com/semuxproject/semux/pull/689) ([cryptokat](https://github.com/cryptokat))
- Net: Shuffle the list of nodes returned from GET\_NODES message in order to balance the load on nodes [\#679](https://github.com/semuxproject/semux/pull/679) ([cryptokat](https://github.com/cryptokat))
- Net: Add mainnet.semux.net as an alternative dns seed [\#662](https://github.com/semuxproject/semux/pull/662) ([cryptokat](https://github.com/cryptokat))
- Net, Config: Allow for additional DNS seeds [\#653](https://github.com/semuxproject/semux/pull/653) ([orogvany](https://github.com/orogvany))
- Core: Upgrade Leveldb From 1.8 to 1.18 [\#673](https://github.com/semuxproject/semux/pull/673) ([cryptokat](https://github.com/cryptokat))
- Core: Improve error reporting of UNVOTE transaction [\#623](https://github.com/semuxproject/semux/pull/623) ([cryptokat](https://github.com/cryptokat))
- Core: Optimize wallet lookup [\#601](https://github.com/semuxproject/semux/pull/601) ([orogvany](https://github.com/orogvany))
- API: Update error messages to be consistent [\#657](https://github.com/semuxproject/semux/pull/657) ([orogvany](https://github.com/orogvany))
- API: Validate `hash` on `getBlock` calls [\#654](https://github.com/semuxproject/semux/pull/654) ([orogvany](https://github.com/orogvany))
- API: Add a parameter 'name' to `/create_account` [\#614](https://github.com/semuxproject/semux/pull/614) ([cryptokat](https://github.com/cryptokat))
- API: Add parameter descriptions [\#600](https://github.com/semuxproject/semux/pull/600) ([orogvany](https://github.com/orogvany))
- API: Consistent error handling [\#556](https://github.com/semuxproject/semux/pull/556) ([orogvany](https://github.com/orogvany))
- API: Pretty print API response when get parameter pretty=true [\#555](https://github.com/semuxproject/semux/pull/555) ([orogvany](https://github.com/orogvany))
- API: Add sign/verify messages calls [\#549](https://github.com/semuxproject/semux/pull/549) ([orogvany](https://github.com/orogvany))
- API: Add a data field `transactionCount` to the response of `/get_account` API [\#543](https://github.com/semuxproject/semux/pull/543) ([cryptokat](https://github.com/cryptokat))
- API: Add data field TransactionType\#blockNumber [\#526](https://github.com/semuxproject/semux/pull/526) ([cryptokat](https://github.com/cryptokat))
- Tools: Upgrade Jackson to 2.9.4 [\#605](https://github.com/semuxproject/semux/pull/605) ([cryptokat](https://github.com/cryptokat))
- Windows: Detect Installation of Microsoft Visual C++ Redistributable Package Under Windows Platform [\#531](https://github.com/semuxproject/semux/pull/531) ([cryptokat](https://github.com/cryptokat))<file_sep>#!/bin/sh
commit=$(git rev-parse --short=7 HEAD)
version=$(grep '^ <version>.*</version>$' pom.xml | awk -F'[><]' '{print $3}')
version="$version-$commit"
name=semux

# work from the project root, regardless of where this script is invoked
cd "$(dirname "$0")/../"

# build the distributions (tests are skipped); abort on failure
mvn clean install -DskipTests || exit

# package from the dist directory
cd dist

# Windows
WINDIST=${name}-windows-${version}
WINBALL=${WINDIST}.zip
mv windows ${WINDIST}
zip -r ${WINBALL} ${WINDIST} || exit
sha256sum ${WINBALL} > ${WINDIST}.sha256
sha256sum --check ${WINDIST}.sha256

# Linux
LINUXDIST=${name}-linux-${version}
LINUXBALL=${LINUXDIST}.tar.gz
mv linux ${LINUXDIST}
tar -czvf ${LINUXBALL} ${LINUXDIST} || exit
sha256sum ${LINUXBALL} > ${LINUXDIST}.sha256
sha256sum --check ${LINUXDIST}.sha256

# macOS
MACDIST=${name}-macos-${version}
MACBALL=${MACDIST}.tar.gz
mv macos ${MACDIST}
tar -czvf ${MACBALL} ${MACDIST} || exit
sha256sum ${MACBALL} > ${MACDIST}.sha256
sha256sum --check ${MACDIST}.sha256

# clean up the renamed staging directories
rm -r ${WINDIST}
rm -r ${LINUXDIST}
rm -r ${MACDIST}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.gui.model;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.semux.core.Amount.ZERO;
import static org.semux.core.Amount.Unit.NANO_SEM;
import java.util.Arrays;
import org.junit.Test;
import org.semux.Kernel;
import org.semux.core.Amount;
import org.semux.core.Blockchain;
import org.semux.core.state.Delegate;
import org.semux.util.Bytes;
/**
 * Unit tests for {@code WalletDelegate}: pass-through of delegate fields,
 * the GUI-only counters, and validator detection.
 */
public class WalletDelegateTest {

    private final byte[] address = Bytes.random(20);
    private final byte[] name = Bytes.of("test");
    private final long registeredAt = 2;
    private final Amount votes = NANO_SEM.of(3);

    @Test
    public void testBasic() {
        Delegate d = new Delegate(address, name, registeredAt, votes);
        WalletDelegate wd = new WalletDelegate(d);

        // delegate fields are passed through unchanged
        assertThat(wd.getAddress(), equalTo(address));
        assertThat(wd.getName(), equalTo(name));
        assertThat(wd.getRegisteredAt(), equalTo(registeredAt));
        assertThat(wd.getVotes(), equalTo(votes));

        // GUI-only counters start at zero
        assertEquals(ZERO, wd.getVotesFromMe());
        assertEquals(0, wd.getNumberOfBlocksForged());
        assertEquals(0, wd.getNumberOfTurnsHit());
        assertEquals(0, wd.getNumberOfTurnsMissed());

        wd.setVotesFromMe(NANO_SEM.of(1));
        wd.setNumberOfBlocksForged(2);
        wd.setNumberOfTurnsHit(3);
        wd.setNumberOfTurnsMissed(4);

        assertEquals(NANO_SEM.of(1), wd.getVotesFromMe());
        assertEquals(2L, wd.getNumberOfBlocksForged());
        assertEquals(3L, wd.getNumberOfTurnsHit());
        assertEquals(4L, wd.getNumberOfTurnsMissed());
        // rate = hits / (hits + misses), in percent.
        // BUG FIX: the tolerance was written as `10 ^ -8`, which in Java is
        // bitwise XOR (10 ^ -8 == -14), not ten-to-the-minus-eight; use a
        // proper epsilon.
        assertEquals(100.0 * 3 / (3 + 4), wd.getRate(), 1e-8);
    }

    @Test
    public void testIsValidator() {
        Kernel kernel = mock(Kernel.class);
        Blockchain blockchain = mock(Blockchain.class);

        String v1 = "validator1";
        String v2 = "validator2";
        when(kernel.getBlockchain()).thenReturn(blockchain);
        when(blockchain.getValidators()).thenReturn(Arrays.asList(v1, v2));

        Delegate d = new Delegate(address, name, registeredAt, votes);
        WalletDelegate wd = new WalletDelegate(d);
        assertFalse(wd.isValidator(kernel));

        // a delegate whose *name* equals a validator entry is still not a
        // validator — presumably validators are matched by address, not name;
        // TODO(review): confirm against WalletDelegate#isValidator
        d = new Delegate(address, Bytes.of(v1), registeredAt, votes);
        wd = new WalletDelegate(d);
        assertFalse(wd.isValidator(kernel));
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.crypto;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.math.BigInteger;
import java.security.KeyPair;
import java.security.SignatureException;
import java.security.spec.InvalidKeySpecException;
import org.bouncycastle.util.Arrays;
import org.junit.Assert;
import org.junit.Test;
import org.semux.config.Constants;
import org.semux.crypto.Key.Signature;
import org.semux.util.Bytes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import net.i2p.crypto.eddsa.KeyPairGenerator;
public class KeyTest {

    private static final Logger logger = LoggerFactory.getLogger(KeyTest.class);

    /**
     * Dumps the encoded forms of a freshly generated key pair to stdout for
     * manual inspection. This test has no assertions.
     */
    @Test
    public void testKeyStorage() {
        Key key = new Key();

        System.out.println("pk_encoded : " + Hex.encode(key.pk.getEncoded()));
        System.out.println("pk_algo    : " + key.pk.getAlgorithm());
        System.out.println("pk_format  : " + key.pk.getFormat());
        System.out.println("pk_A       : " + Hex.encode(key.pk.getAbyte()));
        System.out.println();
        System.out.println("sk_encoded : " + Hex.encode(key.sk.getEncoded()));
        System.out.println("sk_algo    : " + key.sk.getAlgorithm());
        System.out.println("sk_format  : " + key.sk.getFormat());
        System.out.println("sk_seed    : " + Hex.encode(key.sk.getSeed()));
        System.out.println("sk_hash_of_seed: " + Hex.encode(key.sk.getH()));
    }

    /**
     * Verifies signature non-malleability: shifting the S component of a valid
     * signature by the group order must yield a signature that fails to verify.
     */
    @Test
    public void testMalleability() {
        Key key = new Key();
        byte[] data = Bytes.of("test");
        byte[] hash = Hash.h256(data);

        Signature sig = key.sign(hash);
        assertTrue(Key.verify(hash, sig));

        // split the raw signature bytes into R (first 32) and S (last 32)
        byte[] R = Arrays.copyOf(sig.getS(), 32);
        byte[] S = Arrays.copyOfRange(sig.getS(), 32, 64);
        BigInteger s = new BigInteger(1, S);
        // l = 2^252 + 27742317777372353535851937790883648493 (the Ed25519 group order)
        BigInteger l = BigInteger.valueOf(2).pow(252).add(new BigInteger("27742317777372353535851937790883648493"));
        // forge a second signature with S' = S + l, truncated back to 32 bytes
        byte[] sPlusL = s.add(l).toByteArray();
        sPlusL = Arrays.copyOfRange(sPlusL, sPlusL.length - 32, sPlusL.length);
        Signature sig2 = new Signature(Bytes.merge(R, sPlusL), sig.getA());
        assertFalse(Key.verify(hash, sig2));
    }

    /**
     * A key reconstructed from another key's encoded private and public parts
     * must expose byte-identical key material.
     */
    @Test
    public void testGenerateKeyPair() throws InvalidKeySpecException {
        Key key1 = new Key();
        assertEquals(Key.PUBLIC_KEY_LEN, key1.getPublicKey().length);
        assertEquals(Key.PRIVATE_KEY_LEN, key1.getPrivateKey().length);

        Key key2 = new Key(key1.getPrivateKey(), key1.getPublicKey());
        Assert.assertArrayEquals(key1.getPublicKey(), key2.getPublicKey());
        Assert.assertArrayEquals(key1.getPrivateKey(), key2.getPrivateKey());
    }

    /**
     * Constructing a key from a private key and a non-matching public key must
     * be rejected with {@link InvalidKeySpecException}.
     */
    @Test(expected = InvalidKeySpecException.class)
    public void testPublicPrivateKeyMismatch() throws InvalidKeySpecException {
        Key key1 = new Key();
        new Key(key1.getPrivateKey(), new byte[Key.PUBLIC_KEY_LEN]);
    }

    /**
     * Signs a message hash and verifies both the signature itself and that the
     * signer's public key / address can be recovered from the signature bytes.
     */
    @Test
    public void testSignAndVerify() throws SignatureException {
        Key key = new Key();
        byte[] data = Bytes.of("test");

        byte[] hash = Hash.h256(data);
        byte[] sig = key.sign(hash).toBytes();
        assertEquals(Signature.LENGTH, sig.length);

        assertTrue(Key.verify(hash, sig));
        assertArrayEquals(key.getPublicKey(), Signature.fromBytes(sig).getPublicKey());
        assertArrayEquals(key.toAddress(), Signature.fromBytes(sig).getAddress());
    }

    /**
     * Signing and verifying work on large (1 MiB) inputs, not only on hashes.
     */
    @Test
    public void testSignLargeData() throws SignatureException {
        byte[] data = Bytes.random(1024 * 1024);
        Key key = new Key();

        Signature sig = key.sign(data);
        assertTrue(Key.verify(data, sig));
        assertArrayEquals(key.getPublicKey(), sig.getPublicKey());
    }

    /**
     * Random byte sequences — too short, exactly signature-length, and too
     * long — must all fail verification rather than throw.
     */
    @Test
    public void testInvalidSignature() throws SignatureException {
        byte[] data = Bytes.of("test");
        byte[] hash = Hash.h256(data);

        assertFalse(Key.verify(hash, Bytes.random(20)));
        assertFalse(Key.verify(hash, Bytes.random(Signature.LENGTH)));
        assertFalse(Key.verify(hash, Bytes.random(200)));
    }

    /**
     * Logs the serialized signature size and the projected yearly storage cost
     * for 64 validators. Informational only; no assertions.
     */
    @Test
    public void testSignatureSize() {
        Key key = new Key();
        byte[] data = Bytes.of("test");

        byte[] hash = Hash.h256(data);
        byte[] sig = key.sign(hash).toBytes();
        logger.info("signature size: {} B, {} GB per year", sig.length,
                64.0 * sig.length * Constants.BLOCKS_PER_DAY * 365 / 1024 / 1024 / 1024);
    }

    /**
     * A private key freshly generated by the underlying EdDSA library can be
     * imported from its encoded form, yielding the matching public key.
     */
    @Test
    public void testImportPrivateKeyDynamic() throws InvalidKeySpecException {
        KeyPairGenerator gen = new KeyPairGenerator();
        KeyPair keypair = gen.generateKeyPair();

        Key account = new Key(keypair.getPrivate().getEncoded());
        assertEquals(Hex.encode(keypair.getPublic().getEncoded()), Hex.encode(account.getPublicKey()));
    }

    /**
     * Importing a fixed, hex-encoded private key yields the expected public
     * key. (Key material is redacted in this copy of the source.)
     */
    @Test
    public void testImportPrivateKeyStatic() throws InvalidKeySpecException {
        Key account = new Key(Hex.decode(
                "<KEY>0efe5fe254f2af"));
        assertEquals("<KEY>",
                Hex.encode(account.getPublicKey()));
    }
}
<file_sep>#!/bin/bash
# Point X clients at the virtual framebuffer display started below.
export DISPLAY=:99.0
# Start Xvfb via its init script (-e: exit immediately if the script fails).
sh -e /etc/init.d/xvfb start
sleep 3 # give xvfb some time to start
<file_sep>#!/bin/bash
HOST=127.0.0.1
PORT=5161

# Block until $HOST:$PORT accepts TCP connections, then report the wait time.
wait_for()
{
    echo "waiting for $HOST:$PORT"
    start_ts=$(date +%s)
    # probe once per second with netcat until the port is open
    until nc -v -z $HOST $PORT
    do
        sleep 1
    done
    end_ts=$(date +%s)
    echo "$HOST:$PORT is available after $((end_ts - start_ts)) seconds"
    return 0
}

wait_for
exit $?
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.net;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.semux.config.Config;
import org.semux.config.Constants;
import org.semux.crypto.Key;
import org.semux.net.NodeManager.Node;
import org.semux.util.SystemUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.netty.bootstrap.Bootstrap;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelOption;
import io.netty.channel.DefaultMessageSizeEstimator;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.nio.NioSocketChannel;
/**
 * Represents a client which connects to the Semux network.
 */
public class PeerClient {

    private static final Logger logger = LoggerFactory.getLogger(PeerClient.class);

    // Names client threads "client-0", "client-1", ... for easier debugging.
    private static final ThreadFactory factory = new ThreadFactory() {
        AtomicInteger cnt = new AtomicInteger(0);

        @Override
        public Thread newThread(Runnable r) {
            return new Thread(r, "client-" + cnt.getAndIncrement());
        }
    };

    // Single-threaded scheduler, shared by all instances, for the IP refresh task.
    private static final ScheduledExecutorService timer = Executors.newSingleThreadScheduledExecutor(factory);

    // Handle to the periodic IP refresh task; null when no refresh is running.
    private ScheduledFuture<?> ipRefreshFuture = null;

    // Current public IP; may be updated in the background by startIpRefresh().
    private String ip;
    private int port;
    private Key coinbase;

    private EventLoopGroup workerGroup;

    /**
     * Create a new PeerClient instance.
     *
     * Falls back to the loopback address when no IP is declared in the config,
     * and in that case starts a background task that keeps the public IP fresh.
     *
     * @param config
     *            node configuration providing the declared IP and listen port
     * @param coinbase
     *            key whose address identifies this peer
     */
    public PeerClient(Config config, Key coinbase) {
        this(config.p2pDeclaredIp().orElse(InetAddress.getLoopbackAddress().getHostAddress()), config.p2pListenPort(),
                coinbase);

        if (!config.p2pDeclaredIp().isPresent()) {
            startIpRefresh();
        }
    }

    /**
     * Create a new PeerClient with the given public IP address and coinbase.
     *
     * @param ip
     *            public IP address of this node
     * @param port
     *            listening port
     * @param coinbase
     *            key whose address identifies this peer
     */
    public PeerClient(String ip, int port, Key coinbase) {
        logger.info("Use IP address: {}", ip);

        this.ip = ip;
        this.port = port;
        this.coinbase = coinbase;
        this.workerGroup = new NioEventLoopGroup(0, factory);
    }

    /**
     * Keeps updating public IP address.
     *
     * Polls every 30 seconds; adopts the fetched address only when it differs
     * from the current one and is not a site-local (private) address.
     */
    protected void startIpRefresh() {
        logger.info("Starting IP refresh thread");
        ipRefreshFuture = timer.scheduleAtFixedRate(() -> {
            String newIp = SystemUtil.getIp();
            try {
                if (!ip.equals(newIp) && !InetAddress.getByName(newIp).isSiteLocalAddress()) {
                    logger.info("New IP address detected: {} => {}", ip, newIp);
                    ip = newIp;
                }
            } catch (UnknownHostException e) {
                logger.error("The fetched IP address is invalid: {}", newIp);
            }
        }, 0, 30, TimeUnit.SECONDS);
    }

    /**
     * Returns this node.
     *
     * @return a Node built from the current IP address and port
     */
    public Node getNode() {
        return new Node(ip, port);
    }

    /**
     * Returns the listening IP address.
     *
     * @return the current public IP address
     */
    public String getIp() {
        return ip;
    }

    /**
     * Returns the listening IP port.
     *
     * @return the listening port
     */
    public int getPort() {
        return port;
    }

    /**
     * Returns the peerId of this client.
     *
     * @return the coinbase address as a string
     */
    public String getPeerId() {
        return coinbase.toAddressString();
    }

    /**
     * Returns the coinbase.
     *
     * @return the coinbase key of this client
     */
    public Key getCoinbase() {
        return coinbase;
    }

    /**
     * Connects to a remote peer asynchronously.
     *
     * @param remoteNode
     *            the peer to connect to
     * @param ci
     *            channel initializer wiring up the protocol handlers
     * @return a future that completes when the connection attempt finishes
     */
    public ChannelFuture connect(Node remoteNode, SemuxChannelInitializer ci) {
        Bootstrap b = new Bootstrap();
        b.group(workerGroup);
        b.channel(NioSocketChannel.class);

        b.option(ChannelOption.SO_KEEPALIVE, true);
        b.option(ChannelOption.MESSAGE_SIZE_ESTIMATOR, DefaultMessageSizeEstimator.DEFAULT);
        b.option(ChannelOption.CONNECT_TIMEOUT_MILLIS, Constants.DEFAULT_CONNECT_TIMEOUT);

        b.remoteAddress(remoteNode.toAddress());
        b.handler(ci);

        return b.connect();
    }

    /**
     * Closes this client.
     *
     * Shuts down the worker event loop and cancels the IP refresh task, if any.
     */
    public void close() {
        logger.info("Shutting down PeerClient");
        workerGroup.shutdownGracefully();
        // workerGroup.terminationFuture().sync();

        if (ipRefreshFuture != null) {
            ipRefreshFuture.cancel(true);
        }
    }
}<file_sep>### What is BFT
In fault-tolerant computer systems, and in particular distributed computing systems, Byzantine fault tolerance (BFT) is the characteristic of a system that tolerates the class of failures known as the Byzantine Generals' Problem.
### What is Semux BFT
### Why BFT consensus is preferred
### Semux BFT specifications<file_sep>### General configuration
```
################################################################################
# #
# Copyright (c) 2017-2018 The Semux Developers #
# #
# Distributed under the MIT software license, see the accompanying file #
# LICENSE or https://opensource.org/licenses/mit-license.php #
# #
################################################################################
#================
# P2P
#================
# Declared IP address
p2p.declaredIp =
# Binding IP address and port
p2p.listenIp = 0.0.0.0
p2p.listenPort = 5161
# Seed nodes, IP addresses separated by comma
p2p.seedNodes =
#================
# Network
#================
# Max number of inbound connections
net.maxInboundConnections = 1024
# Max number of inbound connections from each unique IP address
net.maxInboundConnectionsPerIp = 5
# Max number of outbound connections
net.maxOutboundConnections = 128
# Max message queue size
net.maxMessageQueueSize = 4096
# Message relay redundancy
net.relayRedundancy = 16
# Channel idle timeout, ms
net.channelIdleTimeout = 120000
# DNS Seed (comma delimited)
net.dnsSeeds.mainNet = mainnet.semux.org,mainnet.semux.net
net.dnsSeeds.testNet = testnet.semux.org
#================
# API
#================
# Be sure to set up authentication first before enabling API
api.enabled = false
# Listening address and port
api.listenIp = 127.0.0.1
api.listenPort = 5171
# Basic authentication
api.username = YOUR_API_USERNAME
api.password = <PASSWORD>
#================
# UI
#================
# Specify the localization of UI
# ui.locale = en_US
# Specify the unit & fraction digits of values
# ui.unit must be one of SEM, mSEM, μSEM
ui.unit = SEM
ui.fractionDigits = 9
```
### IP whitelist and blacklist
Example:
```
{
"rules": [
{"type": "ACCEPT", "address": "127.0.0.1/8"},
{"type": "ACCEPT", "address": "192.168.0.0/16"},
{"type": "REJECT", "address": "8.8.8.8"}
]
}
```<file_sep>* Submit issue tickets at https://github.com/semuxproject/semux/issues
* Please do not leave your Semux address in the issue ticket. Instead, we recommend putting your address in your profile bio.
* Only tickets labelled as `bug` are eligible for bug bounty. The reward is 100 ~ 100,000 SEM based on its severity and impact.
* We process bug bounty monthly. You can also contact <EMAIL>, if you urgently need it.
* Thanks!<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux;
import java.util.Collections;
import java.util.List;
import org.semux.core.Block;
import org.semux.core.BlockHeader;
import org.semux.core.Transaction;
import org.semux.core.TransactionResult;
import org.semux.crypto.Key;
import org.semux.util.Bytes;
import org.semux.util.MerkleUtil;
/**
 * Helpers for constructing blocks in tests.
 */
public class TestUtils {

    /**
     * Builds a block at the given height with a fresh random coinbase, the
     * current wall-clock timestamp, and Merkle roots derived from the given
     * transactions and results.
     *
     * @param number
     *            block height
     * @param txs
     *            transactions to include
     * @param res
     *            transaction results to include
     * @return the assembled block
     */
    public static Block createBlock(long number, List<Transaction> txs, List<TransactionResult> res) {
        byte[] minerAddress = new Key().toAddress();
        BlockHeader header = new BlockHeader(
                number,
                minerAddress,
                Bytes.EMPTY_HASH, // previous block hash
                System.currentTimeMillis(),
                MerkleUtil.computeTransactionsRoot(txs),
                MerkleUtil.computeResultsRoot(res),
                Bytes.EMPTY_HASH, // state root
                new byte[0]); // extra data
        return new Block(header, txs, res);
    }

    /**
     * Builds a block at the given height with no transactions and no results.
     *
     * @param number
     *            block height
     * @return the assembled empty block
     */
    public static Block createEmptyBlock(long number) {
        return createBlock(number, Collections.emptyList(), Collections.emptyList());
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.gui.panel;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.semux.core.Amount.ZERO;
import static org.semux.core.Amount.Unit.SEM;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang3.RandomUtils;
import org.assertj.swing.edt.GuiActionRunner;
import org.assertj.swing.fixture.FrameFixture;
import org.assertj.swing.junit.testcase.AssertJSwingJUnitTestCase;
import org.assertj.swing.timing.Timeout;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
import org.semux.KernelMock;
import org.semux.core.Blockchain;
import org.semux.core.BlockchainImpl;
import org.semux.core.PendingManager;
import org.semux.core.Transaction;
import org.semux.core.TransactionResult;
import org.semux.core.TransactionType;
import org.semux.core.state.Delegate;
import org.semux.core.state.DelegateState;
import org.semux.crypto.Key;
import org.semux.gui.SwingUtil;
import org.semux.gui.WalletModelRule;
import org.semux.gui.model.WalletDelegate;
import org.semux.message.GuiMessages;
import org.semux.rules.KernelRule;
import org.semux.util.Bytes;
/**
 * GUI tests for the delegates panel: selecting delegates, voting, unvoting,
 * and registering as a delegate, driven through AssertJ-Swing fixtures with
 * the kernel and blockchain mocked out.
 */
@RunWith(MockitoJUnitRunner.Silent.class)
public class DelegatesPanelTest extends AssertJSwingJUnitTestCase {

    // Isolated kernel instance for this test (ports 51610/51710).
    @Rule
    public KernelRule kernelRule1 = new KernelRule(51610, 51710);

    // Wallet model seeded with SEM.of(10000) and SEM.of(1)
    // (presumably available and locked balances — confirm against WalletModelRule).
    @Rule
    public WalletModelRule walletRule = new WalletModelRule(SEM.of(10000), SEM.of(1));

    // Captures the transaction handed to the pending manager for inspection.
    @Captor
    ArgumentCaptor<Transaction> transactionArgumentCaptor = ArgumentCaptor.forClass(Transaction.class);

    DelegatesPanelTestApplication application;

    FrameFixture window;

    List<WalletDelegate> walletDelegates;

    @Mock
    private Blockchain blockchain;

    @Mock
    private DelegateState delegateState;

    private Key delegateAccount1;

    @Mock
    private WalletDelegate delegate1;

    @Mock
    private BlockchainImpl.ValidatorStats delegateStats1;

    private Key delegateAccount2;

    @Mock
    private WalletDelegate delegate2;

    @Mock
    private BlockchainImpl.ValidatorStats delegateStats2;

    @Mock
    PendingManager pendingManager;

    // Address that the mocked delegate state reports as already registered.
    private static final Key DELEGATE_KEY = new Key();

    KernelMock kernelMock;

    /**
     * Wires up two mocked delegates (both with zero votes) and a mocked
     * blockchain/delegate state before each test.
     */
    @Override
    public void onSetUp() {
        // mock delegates
        walletDelegates = new ArrayList<>();

        delegateAccount1 = new Key();
        when(delegate1.getNameString()).thenReturn("delegate 1");
        when(delegate1.getAddressString()).thenReturn(delegateAccount1.toAddressString());
        when(delegate1.getAddress()).thenReturn(delegateAccount1.toAddress());
        when(delegate1.getVotes()).thenReturn(ZERO);
        when(delegate1.getVotesFromMe()).thenReturn(ZERO);
        walletDelegates.add(delegate1);

        delegateAccount2 = new Key();
        when(delegate2.getNameString()).thenReturn("delegate 2");
        when(delegate2.getAddressString()).thenReturn(delegateAccount2.toAddressString());
        when(delegate2.getAddress()).thenReturn(delegateAccount2.toAddress());
        when(delegate2.getVotes()).thenReturn(ZERO);
        when(delegate2.getVotesFromMe()).thenReturn(ZERO);
        walletDelegates.add(delegate2);

        when(walletRule.walletModel.getDelegates()).thenReturn(walletDelegates);

        // mock kernel
        kernelMock = spy(kernelRule1.getKernel());
        when(delegateState.getVote(any(), any())).thenReturn(ZERO);
        // both DELEGATE_KEY's address and the name "semux" are already taken
        when(delegateState.getDelegateByAddress(DELEGATE_KEY.toAddress())).thenReturn(mock(Delegate.class));
        when(delegateState.getDelegateByName(Bytes.of("semux"))).thenReturn(mock(Delegate.class));
        when(blockchain.getDelegateState()).thenReturn(delegateState);
        when(blockchain.getValidatorStats(delegate1.getAddress())).thenReturn(delegateStats1);
        when(blockchain.getValidatorStats(delegate2.getAddress())).thenReturn(delegateStats2);
        when(kernelMock.getBlockchain()).thenReturn(blockchain);
    }

    @Override
    public void onTearDown() {
        Mockito.reset(kernelMock);
    }

    /**
     * Clicking a row in the delegates table updates the "selected delegate"
     * label accordingly.
     */
    @Test
    public void testSelectDelegate() {
        when(kernelMock.getPendingManager()).thenReturn(pendingManager);

        application = GuiActionRunner
                .execute(() -> new DelegatesPanelTestApplication(walletRule.walletModel, kernelMock));
        window = new FrameFixture(robot(), application);
        window.show().requireVisible().moveToFront();

        // the initial label of selected delegate should be PleaseSelectDelegate
        window.label("SelectedDelegateLabel").requireText(GuiMessages.get("PleaseSelectDelegate"));

        // click on the first delegate
        window.table("DelegatesTable").cell("delegate 1").requireNotEditable().click();

        // the label of selected delegate should display the first delegate's name
        window.label("SelectedDelegateLabel").requireText(GuiMessages.get("SelectedDelegate", "delegate 1"));

        // click on the second delegate
        window.table("DelegatesTable").cell("delegate 2").requireNotEditable().click();

        // the label of selected delegate should display the second delegate's name
        window.label("SelectedDelegateLabel").requireText(GuiMessages.get("SelectedDelegate", "delegate 2"));
    }

    /**
     * A vote whose transaction is accepted by the pending manager shows the
     * success dialog.
     */
    @Test
    public void testVoteSuccess() {
        testVote(new PendingManager.ProcessTransactionResult(1));
        window.optionPane(Timeout.timeout(1000)).requireTitle(GuiMessages.get("SuccessDialogTitle")).requireVisible();
    }

    /**
     * A vote whose transaction is rejected (insufficient available funds)
     * shows the error dialog.
     */
    @Test
    public void testVoteFailure() {
        testVote(new PendingManager.ProcessTransactionResult(0, TransactionResult.Error.INSUFFICIENT_AVAILABLE));
        window.optionPane(Timeout.timeout(1000)).requireTitle(GuiMessages.get("ErrorDialogTitle")).requireVisible();
    }

    /**
     * Unvoting more than the wallet's locked balance is rejected before any
     * transaction is submitted.
     */
    @Test
    public void testInsufficientLocked() {
        testUnvote("10");
        window.optionPane(Timeout.timeout(1000))
                .requireMessage(GuiMessages.get("InsufficientLockedFunds", SwingUtil.formatAmount(SEM.of(10))))
                .requireVisible();
    }

    /**
     * Unvoting from a delegate we hold no votes for is rejected with an
     * InsufficientVotes message.
     */
    @Test
    public void testInsufficientVotesForDelegate() {
        // try to unvote 1 SEM from delegate 1, which has no votes from us
        testUnvote("1");
        window.optionPane(Timeout.timeout(1000)).requireMessage(GuiMessages.get("InsufficientVotes")).requireVisible();
    }

    /**
     * Shared driver: selects delegate 1, enters 10 votes, and clicks "vote",
     * with the pending manager returning the given result.
     */
    private void testVote(PendingManager.ProcessTransactionResult mockResult) {
        // mock pending manager
        when(pendingManager.getNonce(any())).thenReturn(RandomUtils.nextLong());
        when(pendingManager.addTransactionSync(any())).thenReturn(mockResult);
        when(kernelMock.getPendingManager()).thenReturn(pendingManager);

        application = GuiActionRunner
                .execute(() -> new DelegatesPanelTestApplication(walletRule.walletModel, kernelMock));
        window = new FrameFixture(robot(), application);
        window.show().requireVisible().moveToFront();

        // the initial label of selected delegate should be PleaseSelectDelegate
        window.label("SelectedDelegateLabel").requireText(GuiMessages.get("PleaseSelectDelegate"));

        // click on the first delegate
        window.table("DelegatesTable").cell("delegate 1").requireNotEditable().click();

        // fills number of votes
        window.textBox("textVote").requireEditable().setText("10");

        // click vote button
        window.button("btnVote").requireVisible().click();
    }

    /**
     * Shared driver: selects delegate 1, enters the given unvote amount, and
     * clicks "unvote".
     */
    private void testUnvote(String amount) {
        when(kernelMock.getPendingManager()).thenReturn(pendingManager);

        application = GuiActionRunner
                .execute(() -> new DelegatesPanelTestApplication(walletRule.walletModel, kernelMock));
        window = new FrameFixture(robot(), application);
        window.show().requireVisible().moveToFront();

        // the initial label of selected delegate should be PleaseSelectDelegate
        window.label("SelectedDelegateLabel").requireText(GuiMessages.get("PleaseSelectDelegate"));

        // click on the first delegate
        window.table("DelegatesTable").cell("delegate 1").requireNotEditable().click();

        // fills number of votes
        window.textBox("textUnvote").requireEditable().setText(amount);

        // click unvote button
        window.button("btnUnvote").requireVisible().click();
    }

    /**
     * A successful registration submits a DELEGATE transaction with the
     * configured burn amount and fee.
     */
    @Test
    public void testDelegateSuccess() {
        testDelegate("test_delegate", new PendingManager.ProcessTransactionResult(1));
        confirmDelegateRegistration();
        window.optionPane(Timeout.timeout(1000)).requireTitle(GuiMessages.get("SuccessDialogTitle")).requireVisible();

        // verify added transaction
        verify(pendingManager).addTransactionSync(transactionArgumentCaptor.capture());
        Transaction tx = transactionArgumentCaptor.getValue();
        assertEquals(TransactionType.DELEGATE, tx.getType());
        assertArrayEquals(Bytes.EMPTY_ADDRESS, tx.getTo());
        assertEquals(kernelMock.getConfig().minDelegateBurnAmount(), tx.getValue());
        assertEquals(kernelMock.getConfig().minTransactionFee(), tx.getFee());
    }

    /**
     * A registration rejected for insufficient available funds shows the
     * error dialog.
     */
    @Test
    public void testDelegateFailureInsufficientAvailable() {
        testDelegate("test_delegate",
                new PendingManager.ProcessTransactionResult(0, TransactionResult.Error.INSUFFICIENT_AVAILABLE));
        confirmDelegateRegistration();
        window.optionPane(Timeout.timeout(1000)).requireTitle(GuiMessages.get("ErrorDialogTitle")).requireVisible();
    }

    /**
     * Registering under a name that is already taken ("semux", per the
     * onSetUp mocks) fails before any transaction is submitted.
     */
    @Test
    public void testDelegateFailureDuplicatedName() {
        testDelegate("semux", null);
        window.optionPane(Timeout.timeout(1000)).requireTitle(GuiMessages.get("ErrorDialogTitle")).requireVisible();
    }

    /**
     * Registering from an address that is already a delegate (DELEGATE_KEY,
     * per the onSetUp mocks) fails before any transaction is submitted.
     */
    @Test
    public void testDelegateFailureDuplicatedAddress() {
        walletRule.walletModel.getAccounts().get(0).setKey(DELEGATE_KEY);
        testDelegate("semux", null);
        window.optionPane(Timeout.timeout(1000)).requireTitle(GuiMessages.get("ErrorDialogTitle")).requireVisible();
    }

    /**
     * Shared driver: enters the delegate name and clicks "register", with the
     * pending manager returning the given result.
     */
    private void testDelegate(String name, PendingManager.ProcessTransactionResult mockResult) {
        // mock pending manager
        when(pendingManager.getNonce(any())).thenReturn(RandomUtils.nextLong());
        when(pendingManager.addTransactionSync(any())).thenReturn(mockResult);
        when(kernelMock.getPendingManager()).thenReturn(pendingManager);

        application = GuiActionRunner
                .execute(() -> new DelegatesPanelTestApplication(walletRule.walletModel, kernelMock));
        window = new FrameFixture(robot(), application);
        window.show().requireVisible().moveToFront();

        // fills delegate name
        window.textBox("textName").requireEditable().setText(name);

        // click register button
        window.button("btnDelegate").requireVisible().click();
    }

    /**
     * Clicks "yes" on the delegate-registration confirmation dialog.
     */
    private void confirmDelegateRegistration() {
        window.optionPane(Timeout.timeout(1000)).requireTitle(GuiMessages.get("ConfirmDelegateRegistration"))
                .requireVisible()
                .yesButton().requireVisible().click();
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.net;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicInteger;
import org.semux.Kernel;
import org.semux.config.Constants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.Channel;
import io.netty.channel.ChannelOption;
import io.netty.channel.DefaultMessageSizeEstimator;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import io.netty.handler.logging.LoggingHandler;
/**
 * Represents a server in the Semux network
 */
public class PeerServer {

    private static final Logger logger = LoggerFactory.getLogger(PeerServer.class);

    // Names server threads "server-0", "server-1", ... for easier debugging.
    private static final ThreadFactory factory = new ThreadFactory() {
        AtomicInteger cnt = new AtomicInteger(0);

        @Override
        public Thread newThread(Runnable r) {
            return new Thread(r, "server-" + cnt.getAndIncrement());
        }
    };

    protected Kernel kernel;
    protected Channel channel;

    private NioEventLoopGroup bossGroup;
    private NioEventLoopGroup workerGroup;

    public PeerServer(Kernel kernel) {
        this.kernel = kernel;
    }

    /**
     * Starts the server on the IP address and port configured for this kernel.
     */
    public void start() {
        start(kernel.getConfig().p2pListenIp(), kernel.getConfig().p2pListenPort());
    }

    /**
     * Starts the server on the given address. No-op when already running.
     *
     * @param ip
     *            binding IP address
     * @param port
     *            binding port
     */
    public void start(String ip, int port) {
        if (isRunning()) {
            return;
        }

        try {
            bossGroup = new NioEventLoopGroup(1, factory);
            workerGroup = new NioEventLoopGroup(0, factory);

            ServerBootstrap b = new ServerBootstrap();
            b.group(bossGroup, workerGroup);
            b.channel(NioServerSocketChannel.class);

            b.option(ChannelOption.SO_KEEPALIVE, true);
            b.option(ChannelOption.MESSAGE_SIZE_ESTIMATOR, DefaultMessageSizeEstimator.DEFAULT);
            b.option(ChannelOption.CONNECT_TIMEOUT_MILLIS, Constants.DEFAULT_CONNECT_TIMEOUT);

            b.handler(new LoggingHandler());
            b.childHandler(new SemuxChannelInitializer(kernel, null));

            logger.info("Starting peer server: address = {}:{}", ip, port);
            channel = b.bind(ip, port).sync().channel();
        } catch (Exception e) {
            logger.error("Failed to start peer server", e);
            // BUG FIX: release the event loop groups on a failed start;
            // previously their threads were leaked when bind() threw.
            shutDownEventLoops();
        }
    }

    /**
     * Stops the server and releases its resources. No-op when not running.
     */
    public void stop() {
        if (isRunning() && channel.isOpen()) {
            try {
                channel.close().sync();
                shutDownEventLoops();
                // workerGroup.terminationFuture().sync();
                // bossGroup.terminationFuture().sync();
                ConnectionLimitHandler.reset();
                channel = null;
            } catch (Exception e) {
                logger.error("Failed to close channel", e);
            }
            logger.info("PeerServer shut down");
        }
    }

    /**
     * Returns whether the server channel has been bound.
     *
     * @return true when the server is running
     */
    public boolean isRunning() {
        return channel != null;
    }

    /**
     * Gracefully shuts down the worker and boss event loop groups, if created,
     * and clears the references so a later start() creates fresh ones.
     */
    private void shutDownEventLoops() {
        if (workerGroup != null) {
            workerGroup.shutdownGracefully();
            workerGroup = null;
        }
        if (bossGroup != null) {
            bossGroup.shutdownGracefully();
            bossGroup = null;
        }
    }
}<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.api.response;
import org.semux.api.ApiHandlerResponse;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * API response wrapper carrying a vote value under the "result" JSON property.
 */
public class GetVoteResponse extends ApiHandlerResponse {

    // The vote value returned by the API call.
    @JsonProperty("result")
    public final Long vote;

    /**
     * @param success
     *            whether the API call succeeded
     * @param vote
     *            the vote value to return; the message field of the base
     *            response is left null
     */
    public GetVoteResponse(
            @JsonProperty("success") Boolean success,
            @JsonProperty("result") Long vote) {
        super(success, null);
        this.vote = vote;
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.integration;
import static org.awaitility.Awaitility.await;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.powermock.api.mockito.PowerMockito.mockStatic;
import static org.powermock.api.mockito.PowerMockito.when;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.Callable;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.powermock.core.classloader.annotations.PowerMockIgnore;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.powermock.reflect.Whitebox;
import org.semux.IntegrationTest;
import org.semux.Kernel;
import org.semux.config.Config;
import org.semux.core.Genesis;
import org.semux.net.NodeManager;
import org.semux.rules.KernelRule;
@Category(IntegrationTest.class)
@RunWith(PowerMockRunner.class)
@PrepareForTest({ Genesis.class, NodeManager.class })
@PowerMockIgnore({ "jdk.internal.*", "javax.management.*" })
public class ConnectionTest {
@Rule
KernelRule kernelRule1 = new KernelRule(51610, 51710);
Thread serverThread;
List<Socket> sockets;
private final int netMaxInboundConnectionsPerIp = 5;
@Before
public void setUp() {
// mock genesis.json
Genesis genesis = mockGenesis();
mockStatic(Genesis.class);
when(Genesis.load(any())).thenReturn(genesis);
// configure kernel
// netMaxInboundConnectionsPerIp = 5
Config config = kernelRule1.getKernel().getConfig();
Whitebox.setInternalState(config, "netMaxInboundConnectionsPerIp", netMaxInboundConnectionsPerIp);
kernelRule1.getKernel().setConfig(config);
// start kernel
serverThread = new Thread(() -> kernelRule1.getKernel().start());
serverThread.start();
// await until the P2P server has started
await().until(() -> kernelRule1.getKernel().state() == Kernel.State.RUNNING
&& kernelRule1.getKernel().getP2p().isRunning());
// keep socket references
sockets = new CopyOnWriteArrayList<>();
}
@After
public void tearDown() throws InterruptedException {
// close all connections
sockets.parallelStream().filter(Objects::nonNull).forEach(socket -> {
try {
socket.close();
} catch (IOException e) {
e.printStackTrace();
}
});
kernelRule1.getKernel().stop();
await().until(() -> kernelRule1.getKernel().state().equals(Kernel.State.STOPPED));
serverThread.interrupt();
await().until(() -> !serverThread.isAlive());
}
@Test
public void testConnectionLimit() throws InterruptedException, UnknownHostException {
// create 100 idle connections to the P2P server
final int connections = 100;
Collection<Callable<Void>> threads = new ArrayList<>();
ExecutorService executorService = Executors.newFixedThreadPool(100);
for (int i = 1; i <= connections; i++) {
threads.add(() -> {
try {
Socket socket = new Socket();
socket.connect(
new InetSocketAddress(kernelRule1.getKernel().getConfig().p2pListenIp(),
kernelRule1.getKernel().getConfig().p2pListenPort()),
100);
sockets.add(socket);
} catch (Exception e) {
e.printStackTrace();
}
return null;
});
}
List<Future<Void>> futures = executorService.invokeAll(threads);
await().until(() -> futures.stream().allMatch(Future::isDone));
TimeUnit.MILLISECONDS.sleep(500);
// the number of connections should be capped to netMaxInboundConnectionsPerIp
assertEquals(netMaxInboundConnectionsPerIp, kernelRule1.getKernel().getChannelManager().size());
}
/**
 * Connects a client from 127.0.0.1, blacklists that IP via the API, and
 * verifies the blacklisted client is disconnected while others remain.
 */
@Test
public void testBlacklistIp() throws IOException, InterruptedException {
    // create an idle connection
    final int connections = 1;
    Collection<Callable<Void>> threads = new ArrayList<>();
    ExecutorService executorService = Executors.newFixedThreadPool(connections);
    try {
        final List<InetSocketAddress> clientAddresses = new CopyOnWriteArrayList<>();
        for (int i = 1; i <= connections; i++) {
            threads.add(() -> {
                try {
                    Socket socket = new Socket();
                    socket.bind(new InetSocketAddress("127.0.0.1", getFreePort()));
                    sockets.add(socket);
                    clientAddresses.add((InetSocketAddress) socket.getLocalSocketAddress());
                    socket.connect(
                            new InetSocketAddress(kernelRule1.getKernel().getConfig().p2pListenIp(),
                                    kernelRule1.getKernel().getConfig().p2pListenPort()),
                            100);
                } catch (Exception e) {
                    e.printStackTrace();
                }
                return null;
            });
        }
        List<Future<Void>> futures = executorService.invokeAll(threads);
        await().until(() -> futures.stream().allMatch(Future::isDone));
        TimeUnit.MILLISECONDS.sleep(500);

        // wait until all channels are connected
        assertEquals(connections, kernelRule1.getKernel().getChannelManager().size());

        // blacklist 127.0.0.1
        final String blacklistedIp = "127.0.0.1";
        kernelRule1.getKernel().getApiClient().request("add_to_blacklist", "ip", blacklistedIp);

        // all IPs should stay connected except for the blacklisted IP
        await().until(() -> kernelRule1.getKernel().getChannelManager().size() == connections - 1);
        for (InetSocketAddress clientAddress : clientAddresses) {
            if (clientAddress.getHostString().equals(blacklistedIp)) {
                assertFalse(kernelRule1.getKernel().getChannelManager().isConnected(clientAddress));
            } else {
                assertTrue(kernelRule1.getKernel().getChannelManager().isConnected(clientAddress));
            }
        }
    } finally {
        // FIX: the executor was previously leaked; release its worker threads
        executorService.shutdownNow();
    }
}
/**
 * Asks the OS for a currently-free TCP port by binding an ephemeral server
 * socket and immediately releasing it.
 *
 * @return a port number that was free at the time of the call
 * @throws IOException
 *             if the probe socket cannot be opened
 */
private int getFreePort() throws IOException {
    // FIX: try-with-resources closes the probe socket even if getLocalPort throws
    try (ServerSocket serverSocket = new ServerSocket(0)) {
        return serverSocket.getLocalPort();
    }
}
/**
 * Builds a minimal genesis block for tests: zeroed coinbase and parent hash,
 * no premined accounts and no predefined delegates.
 */
private Genesis mockGenesis() {
    // no premined accounts
    List<Genesis.Premine> premine = new ArrayList<>();
    // no predefined delegates
    HashMap<String, String> delegateMap = new HashMap<>();
    // assemble the genesis block from the mocked pieces
    return Genesis.jsonCreator(
            0,
            "0x0000000000000000000000000000000000000000",
            "0x0000000000000000000000000000000000000000000000000000000000000000",
            1504742400000L,
            "semux",
            premine,
            delegateMap,
            new HashMap<>());
}
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.util;
import java.util.Collections;
import java.util.Map;
import java.util.Random;
import org.apache.commons.collections4.map.LRUMap;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
/**
 * Micro-benchmark comparing write and read throughput of a plain LRUMap, a
 * synchronized LRUMap and a Caffeine cache. Results are logged, not asserted.
 */
public class CacheTest {

    private static final Logger logger = LoggerFactory.getLogger(CacheTest.class);

    /** Number of operations per warm-up pass and per measurement. */
    private static final int REPEAT = 1000;

    @Test
    public void testPerformance() {
        Map<Integer, Integer> map = new LRUMap<>();
        Map<Integer, Integer> mapSync = Collections.synchronizedMap(new LRUMap<>());
        Cache<Integer, Integer> cache = Caffeine.newBuilder().build();

        Random r = new Random();
        int bound = REPEAT * 10;

        // warm up all three implementations before timing
        for (int i = 0; i < REPEAT; i++) {
            map.put(r.nextInt(bound), r.nextInt(bound));
            mapSync.put(r.nextInt(bound), r.nextInt(bound));
            cache.put(r.nextInt(bound), r.nextInt(bound));
            map.get(r.nextInt(bound));
            mapSync.get(r.nextInt(bound));
            cache.getIfPresent(r.nextInt(bound));
        }

        // write: the six duplicated timing loops are factored into time(...)
        long writeLru = time(() -> map.put(r.nextInt(bound), r.nextInt(bound)));
        long writeSync = time(() -> mapSync.put(r.nextInt(bound), r.nextInt(bound)));
        long writeCaffeine = time(() -> cache.put(r.nextInt(bound), r.nextInt(bound)));
        logger.info("Write: LRUMap = {} ns, LRUMap (synchronized) = {} ns, Caffeine = {} ns", writeLru, writeSync,
                writeCaffeine);

        // read
        long readLru = time(() -> map.get(r.nextInt(bound)));
        long readSync = time(() -> mapSync.get(r.nextInt(bound)));
        long readCaffeine = time(() -> cache.getIfPresent(r.nextInt(bound)));
        logger.info("Read: LRUMap = {} ns, LRUMap (synchronized) = {} ns, Caffeine = {} ns", readLru, readSync,
                readCaffeine);
    }

    /**
     * Runs the given operation {@link #REPEAT} times and returns the elapsed
     * wall-clock time in nanoseconds.
     */
    private static long time(Runnable op) {
        long start = System.nanoTime();
        for (int i = 0; i < REPEAT; i++) {
            op.run();
        }
        return System.nanoTime() - start;
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.db;
import static org.fusesource.leveldbjni.JniDBFactory.factory;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.EnumMap;
import java.util.List;
import java.util.Map.Entry;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.commons.lang3.tuple.Pair;
import org.fusesource.leveldbjni.JniDBFactory;
import org.iq80.leveldb.CompressionType;
import org.iq80.leveldb.DB;
import org.iq80.leveldb.DBIterator;
import org.iq80.leveldb.Options;
import org.iq80.leveldb.WriteBatch;
import org.semux.db.exception.DatabaseException;
import org.semux.util.ClosableIterator;
import org.semux.util.FileUtil;
import org.semux.util.SystemUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * A {@link Database} implementation backed by leveldb via the JNI bindings.
 */
public class LeveldbDatabase implements Database {

    private static final Logger logger = LoggerFactory.getLogger(LeveldbDatabase.class);

    // directory that holds the leveldb files
    private File file;
    private DB db;
    private boolean isOpened;

    /**
     * Creates a leveldb database at the given location, creating the parent
     * directory if necessary, and opens it with the default options.
     *
     * @param file
     *            the directory that holds the leveldb files
     */
    public LeveldbDatabase(File file) {
        this.file = file;

        File dir = file.getParentFile();
        if (!dir.exists() && !dir.mkdirs()) {
            logger.error("Failed to create directory: {}", dir);
        }

        open(createOptions());
    }

    /**
     * Creates the default options.
     *
     * @return the default leveldb options used by this database
     */
    protected Options createOptions() {
        Options options = new Options();
        options.createIfMissing(true);
        options.compressionType(CompressionType.NONE);
        options.blockSize(4 * 1024 * 1024);
        options.writeBufferSize(8 * 1024 * 1024);
        options.cacheSize(64L * 1024L * 1024L);
        options.paranoidChecks(true);
        options.verifyChecksums(true);
        options.maxOpenFiles(128);

        return options;
    }

    /**
     * Open the database. If leveldb reports corruption, a repair is attempted
     * before reopening; failure to open terminates the process asynchronously.
     *
     * @param options
     *            the leveldb options to open with
     */
    protected void open(Options options) {
        try {
            db = JniDBFactory.factory.open(file, options);
            isOpened = true;
        } catch (IOException e) {
            // FIX: guard against a null exception message before checking for corruption
            if (e.getMessage() != null && e.getMessage().contains("Corruption")) {
                // recover
                recover(options);

                // reopen
                try {
                    db = JniDBFactory.factory.open(file, options);
                    isOpened = true;
                } catch (IOException ex) {
                    // FIX: log the reopen failure (ex), not the original exception (e)
                    logger.error("Failed to open database", ex);
                    SystemUtil.exitAsync(SystemUtil.Code.FAILED_TO_OPEN_DB);
                }
            } else {
                logger.error("Failed to open database", e);
                SystemUtil.exitAsync(SystemUtil.Code.FAILED_TO_OPEN_DB);
            }
        }
    }

    /**
     * Tries to recover the database in case of corruption.
     *
     * @param options
     *            the leveldb options to repair with
     */
    protected void recover(Options options) {
        try {
            logger.info("Database is corrupted, trying to repair...");
            factory.repair(file, options);
            logger.info("Repair done!");
        } catch (IOException ex) {
            logger.error("Failed to repair the database", ex);
            SystemUtil.exitAsync(SystemUtil.Code.FAILED_TO_REPAIR_DB);
        }
    }

    @Override
    public byte[] get(byte[] key) {
        return db.get(key);
    }

    @Override
    public void put(byte[] key, byte[] value) {
        db.put(key, value);
    }

    @Override
    public void delete(byte[] key) {
        db.delete(key);
    }

    @Override
    public void updateBatch(List<Pair<byte[], byte[]>> pairs) {
        try (WriteBatch batch = db.createWriteBatch()) {
            for (Pair<byte[], byte[]> p : pairs) {
                // a null value means deletion of the key
                if (p.getValue() == null) {
                    batch.delete(p.getLeft());
                } else {
                    batch.put(p.getLeft(), p.getRight());
                }
            }
            db.write(batch);
        } catch (IOException e) {
            logger.error("Failed to update batch", e);
            SystemUtil.exitAsync(SystemUtil.Code.FAILED_TO_WRITE_BATCH_TO_DB);
        }
    }

    @Override
    public void close() {
        try {
            if (isOpened) {
                db.close();
                isOpened = false;
            }
        } catch (IOException e) {
            logger.error("Failed to close database: {}", file, e);
        }
    }

    @Override
    public void destroy() {
        close();
        FileUtil.recursiveDelete(file);
    }

    @Override
    public Path getDataDir() {
        return file.toPath();
    }

    @Override
    public ClosableIterator<Entry<byte[], byte[]>> iterator() {
        return iterator(null);
    }

    @Override
    public ClosableIterator<Entry<byte[], byte[]>> iterator(byte[] prefix) {
        return new ClosableIterator<Entry<byte[], byte[]>>() {
            DBIterator itr = db.iterator();

            private ClosableIterator<Entry<byte[], byte[]>> initialize() {
                // position the iterator at the prefix, or at the first entry
                if (prefix != null) {
                    itr.seek(prefix);
                } else {
                    itr.seekToFirst();
                }
                return this;
            }

            @Override
            public boolean hasNext() {
                return itr.hasNext();
            }

            @Override
            public Entry<byte[], byte[]> next() {
                return itr.next();
            }

            @Override
            public void close() {
                try {
                    itr.close();
                } catch (IOException e) {
                    throw new DatabaseException(e);
                }
            }
        }.initialize();
    }

    /**
     * A {@link DatabaseFactory} that lazily opens one leveldb database per
     * {@link DatabaseName} under a common data directory.
     */
    public static class LevelDbFactory implements DatabaseFactory {

        private EnumMap<DatabaseName, Database> databases = new EnumMap<>(DatabaseName.class);

        private File dataDir;
        private AtomicBoolean open;

        public LevelDbFactory(File dataDir) {
            this.dataDir = dataDir;
            this.open = new AtomicBoolean(false);

            open();
        }

        @Override
        public void open() {
            // compareAndSet makes sure the databases are opened only once
            if (open.compareAndSet(false, true)) {
                for (DatabaseName name : DatabaseName.values()) {
                    File file = Paths.get(dataDir.getAbsolutePath(), name.toString().toLowerCase()).toFile();
                    databases.put(name, new LeveldbDatabase(file));
                }
            }
        }

        @Override
        public Database getDB(DatabaseName name) {
            open();
            return databases.get(name);
        }

        @Override
        public void close() {
            if (open.compareAndSet(true, false)) {
                for (Database db : databases.values()) {
                    db.close();
                }
            }
        }

        @Override
        public Path getDataDir() {
            return dataDir.toPath();
        }
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.gui.dialog;
import java.awt.Window;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.GroupLayout;
import javax.swing.GroupLayout.Alignment;
import javax.swing.JButton;
import javax.swing.JDialog;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JTextField;
import javax.swing.LayoutStyle.ComponentPlacement;
import org.apache.commons.lang3.StringUtils;
import org.semux.core.Wallet;
import org.semux.crypto.Hex;
import org.semux.crypto.Key;
import org.semux.gui.Action;
import org.semux.gui.AddressBookEntry;
import org.semux.gui.SemuxGui;
import org.semux.gui.SwingUtil;
import org.semux.message.GuiMessages;
import org.semux.util.exception.UnreachableException;
/**
 * Modal dialog for creating or editing an address-book entry (name + address
 * alias stored in the wallet).
 */
public class AddressBookUpdateDialog extends JDialog implements ActionListener {

    private static final long serialVersionUID = 1L;

    // upper bound on the length of an entry name
    private static final int MAX_ADDRESS_NAME_LENGTH = 256;

    private transient Wallet wallet;
    private transient SemuxGui gui;

    private JTextField nameText;
    private JTextField addressText;

    /**
     * @param parent
     *            owner window of the dialog
     * @param entry
     *            existing entry to edit, or null to create a new one
     * @param wallet
     *            wallet where the alias is stored
     * @param gui
     *            gui to refresh after a successful update
     */
    public AddressBookUpdateDialog(Window parent, AddressBookEntry entry, Wallet wallet, SemuxGui gui) {
        super(parent,
                entry != null ? GuiMessages.get("EditAddressBookEntry") : GuiMessages.get("AddAddressBookEntry"));
        this.wallet = wallet;
        this.gui = gui;

        JLabel lblName = new JLabel("Name");
        JLabel lblAddress = new JLabel("Address");

        // pre-fill the fields when editing an existing entry
        nameText = new JTextField(entry != null ? entry.getName() : "");
        addressText = new JTextField(entry != null ? entry.getAddress() : "");

        JButton btnCancel = SwingUtil.createDefaultButton(GuiMessages.get("Cancel"), this, Action.CANCEL);
        JButton btnOk = SwingUtil.createDefaultButton(GuiMessages.get("OK"), this, Action.OK);

        // @formatter:off
        GroupLayout groupLayout = new GroupLayout(getContentPane());
        groupLayout.setHorizontalGroup(
            groupLayout.createParallelGroup(Alignment.LEADING)
                .addGroup(groupLayout.createSequentialGroup()
                    .addGap(32)
                    .addGroup(groupLayout.createParallelGroup(Alignment.TRAILING)
                        .addComponent(lblAddress)
                        .addComponent(lblName))
                    .addPreferredGap(ComponentPlacement.UNRELATED)
                    .addGroup(groupLayout.createParallelGroup(Alignment.LEADING, false)
                        .addGroup(groupLayout.createSequentialGroup()
                            .addComponent(btnCancel)
                            .addGap(18)
                            .addComponent(btnOk))
                        .addComponent(addressText, GroupLayout.DEFAULT_SIZE, 400, Short.MAX_VALUE)
                        .addComponent(nameText))
                    .addContainerGap(32, Short.MAX_VALUE))
        );
        groupLayout.setVerticalGroup(
            groupLayout.createParallelGroup(Alignment.LEADING)
                .addGroup(groupLayout.createSequentialGroup()
                    .addGap(40)
                    .addGroup(groupLayout.createParallelGroup(Alignment.BASELINE)
                        .addComponent(lblName)
                        .addComponent(nameText, GroupLayout.PREFERRED_SIZE, 26, GroupLayout.PREFERRED_SIZE))
                    .addGap(18)
                    .addGroup(groupLayout.createParallelGroup(Alignment.BASELINE)
                        .addComponent(lblAddress)
                        .addComponent(addressText, GroupLayout.PREFERRED_SIZE, 25, GroupLayout.PREFERRED_SIZE))
                    .addGap(18)
                    .addGroup(groupLayout.createParallelGroup(Alignment.BASELINE)
                        .addComponent(btnCancel)
                        .addComponent(btnOk))
                    .addContainerGap(40, Short.MAX_VALUE))
        );
        getContentPane().setLayout(groupLayout);
        // @formatter:on

        this.setModal(true);
        this.setDefaultCloseOperation(DISPOSE_ON_CLOSE);
        this.setIconImage(SwingUtil.loadImage("logo", 128, 128).getImage());
        this.pack();
        this.setResizable(false);
        this.setLocationRelativeTo(parent);
    }

    @Override
    public void actionPerformed(ActionEvent e) {
        Action action = Action.valueOf(e.getActionCommand());

        switch (action) {
        case OK:
            String name = nameText.getText().trim();
            String address = addressText.getText().trim();

            if (StringUtils.isEmpty(name) || name.length() > MAX_ADDRESS_NAME_LENGTH) {
                JOptionPane.showMessageDialog(this, GuiMessages.get("InvalidName"));
                // made consistent with the address check below (same effect as return)
                break;
            }

            // FIX: the previous pattern [a-z0-9] rejected valid uppercase hex and
            // accepted non-hex letters g-z, which Hex.decode0x cannot decode
            if (StringUtils.isEmpty(address) || !address.matches("0x[0-9a-fA-F]{" + Key.ADDRESS_LEN * 2 + "}")) {
                JOptionPane.showMessageDialog(this, GuiMessages.get("InvalidAddress"));
                break;
            }

            // persist the alias and refresh the UI
            wallet.setAddressAlias(Hex.decode0x(address), name);
            wallet.flush();
            gui.updateModel();

            this.dispose();
            break;
        case CANCEL:
            this.dispose();
            break;
        default:
            throw new UnreachableException();
        }
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.core;
/**
 * Listener interface for components that need to be notified when the
 * blockchain grows.
 */
public interface BlockchainListener {

    /**
     * Callback when a new block was added.
     *
     * @param block
     *            the block that has just been appended to the chain
     */
    void onBlockAdded(Block block);
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.gui.dialog;
import java.awt.Dialog;
import javax.swing.GroupLayout;
import javax.swing.GroupLayout.Alignment;
import javax.swing.JDialog;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JTextArea;
import javax.swing.LayoutStyle.ComponentPlacement;
import org.semux.core.Block;
import org.semux.crypto.Hex;
import org.semux.gui.SemuxGui;
import org.semux.gui.SwingUtil;
import org.semux.gui.model.WalletDelegate;
import org.semux.message.GuiMessages;
/**
 * Read-only dialog showing the details of a single delegate: name, address,
 * registration time, vote counts and forging statistics.
 */
public class DelegateDialog extends JDialog {

    private static final long serialVersionUID = 1L;

    /**
     * Builds the dialog for the given delegate.
     *
     * @param gui
     *            gui whose kernel provides blockchain access
     * @param parent
     *            frame the dialog is positioned relative to
     * @param d
     *            the delegate to display
     */
    public DelegateDialog(SemuxGui gui, JFrame parent, WalletDelegate d) {
        // NOTE(review): the owner is deliberately null and the dialog is
        // modeless; `parent` is only used for positioning below — confirm intended
        super(null, GuiMessages.get("Delegate"), Dialog.ModalityType.MODELESS);
        setName("DelegateDialog");

        // block in which the delegate registered, used for the timestamp
        // NOTE(review): getBlock may return null if the height is unknown — confirm
        Block block = gui.getKernel().getBlockchain().getBlock(d.getRegisteredAt());

        // left-hand column: field labels
        JLabel lblName = new JLabel(GuiMessages.get("Name") + ":");
        JLabel lblAddress = new JLabel(GuiMessages.get("Address") + ":");
        JLabel lblRegisteredAt = new JLabel(GuiMessages.get("RegisteredAt") + ":");
        JLabel lblVotes = new JLabel(GuiMessages.get("Votes") + ":");
        JLabel lblVotesFromMe = new JLabel(GuiMessages.get("VotesFromMe") + ":");
        JLabel lblNumOfBlocksForged = new JLabel(GuiMessages.get("NumBlocksForged") + ":");
        JLabel lblNumOfTurnsHit = new JLabel(GuiMessages.get("NumTurnsHit") + ":");
        JLabel lblNumOfTurnsMissed = new JLabel(GuiMessages.get("NumTurnsMissed") + ":");
        JLabel lblRate = new JLabel(GuiMessages.get("Rate") + ":");

        // right-hand column: formatted values (text areas allow copy via popup)
        JTextArea name = SwingUtil.textAreaWithCopyPopup(d.getNameString());
        JTextArea address = SwingUtil.textAreaWithCopyPopup(Hex.encode0x(d.getAddress()));
        JLabel registeredAt = new JLabel(SwingUtil.formatTimestamp(block.getTimestamp()));
        JLabel votes = new JLabel(SwingUtil.formatVote(d.getVotes()));
        votes.setName("votes");
        JLabel votesFromMe = new JLabel(SwingUtil.formatVote(d.getVotesFromMe()));
        JLabel numOfBlocksForged = new JLabel(SwingUtil.formatNumber(d.getNumberOfBlocksForged()));
        JLabel numOfTurnsHit = new JLabel(SwingUtil.formatNumber(d.getNumberOfTurnsHit()));
        JLabel numOfTurnsMissed = new JLabel(SwingUtil.formatNumber(d.getNumberOfTurnsMissed()));
        JLabel rate = new JLabel(SwingUtil.formatPercentage(d.getRate()));

        // two-column GroupLayout: labels trailing-aligned, values leading-aligned
        // @formatter:off
        GroupLayout groupLayout = new GroupLayout(getContentPane());
        groupLayout.setHorizontalGroup(
            groupLayout.createParallelGroup(Alignment.LEADING)
                .addGroup(groupLayout.createSequentialGroup()
                    .addGap(30)
                    .addGroup(groupLayout.createParallelGroup(Alignment.TRAILING)
                        .addComponent(lblRate)
                        .addComponent(lblNumOfTurnsMissed)
                        .addComponent(lblNumOfTurnsHit)
                        .addComponent(lblNumOfBlocksForged)
                        .addComponent(lblVotesFromMe)
                        .addComponent(lblVotes)
                        .addComponent(lblRegisteredAt)
                        .addComponent(lblAddress)
                        .addComponent(lblName))
                    .addGap(18)
                    .addGroup(groupLayout.createParallelGroup(Alignment.LEADING)
                        .addComponent(name)
                        .addComponent(address)
                        .addComponent(votes)
                        .addComponent(votesFromMe)
                        .addComponent(registeredAt)
                        .addComponent(numOfBlocksForged)
                        .addComponent(numOfTurnsHit)
                        .addComponent(numOfTurnsMissed)
                        .addComponent(rate))
                    .addContainerGap(30, Short.MAX_VALUE))
        );
        groupLayout.setVerticalGroup(
            groupLayout.createParallelGroup(Alignment.LEADING)
                .addGroup(groupLayout.createSequentialGroup()
                    .addGap(30)
                    .addGroup(groupLayout.createParallelGroup(Alignment.BASELINE)
                        .addComponent(lblName)
                        .addComponent(name, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE))
                    .addPreferredGap(ComponentPlacement.UNRELATED)
                    .addGroup(groupLayout.createParallelGroup(Alignment.BASELINE)
                        .addComponent(lblAddress)
                        .addComponent(address, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE))
                    .addPreferredGap(ComponentPlacement.UNRELATED)
                    .addGroup(groupLayout.createParallelGroup(Alignment.BASELINE)
                        .addComponent(lblRegisteredAt)
                        .addComponent(registeredAt))
                    .addPreferredGap(ComponentPlacement.UNRELATED)
                    .addGroup(groupLayout.createParallelGroup(Alignment.BASELINE)
                        .addComponent(lblVotes)
                        .addComponent(votes))
                    .addPreferredGap(ComponentPlacement.UNRELATED)
                    .addGroup(groupLayout.createParallelGroup(Alignment.BASELINE)
                        .addComponent(lblVotesFromMe)
                        .addComponent(votesFromMe))
                    .addPreferredGap(ComponentPlacement.UNRELATED)
                    .addGroup(groupLayout.createParallelGroup(Alignment.BASELINE)
                        .addComponent(lblNumOfBlocksForged)
                        .addComponent(numOfBlocksForged))
                    .addPreferredGap(ComponentPlacement.UNRELATED)
                    .addGroup(groupLayout.createParallelGroup(Alignment.BASELINE)
                        .addComponent(lblNumOfTurnsHit)
                        .addComponent(numOfTurnsHit))
                    .addPreferredGap(ComponentPlacement.UNRELATED)
                    .addGroup(groupLayout.createParallelGroup(Alignment.BASELINE)
                        .addComponent(lblNumOfTurnsMissed)
                        .addComponent(numOfTurnsMissed))
                    .addPreferredGap(ComponentPlacement.UNRELATED)
                    .addGroup(groupLayout.createParallelGroup(Alignment.BASELINE)
                        .addComponent(lblRate)
                        .addComponent(rate))
                    .addContainerGap(30, Short.MAX_VALUE))
        );
        getContentPane().setLayout(groupLayout);
        // @formatter:on

        this.setTitle(GuiMessages.get("Delegate"));
        this.setDefaultCloseOperation(DISPOSE_ON_CLOSE);
        this.setIconImage(SwingUtil.loadImage("logo", 128, 128).getImage());
        this.pack();
        this.setLocationRelativeTo(parent);
        this.setResizable(false);
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.net.filter;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.CopyOnWriteArrayList;
import org.semux.net.filter.exception.IpFilterJsonParseException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import io.netty.handler.ipfilter.IpFilterRuleType;
/**
* SemuxIpFilter is responsible for matching IP address of incoming connection
* against defined rules in ipconfig.json
*
* <p>
* Example Definition of Blacklisting IP Addresses:
* <p>
* <blockquote>
*
* <pre>
* {
* "rules": [
* {"type": "REJECT", "address": "1.2.3.4"},
* {"type": "REJECT", "address": "5.6.7.8"}
* ]
* }
* </pre>
*
* </blockquote>
* </p>
* Example Definition of Whitelisting Local Networks:
* <p>
* <blockquote>
*
* <pre>
* {
* "rules": [
* {"type": "ACCEPT", "address": "127.0.0.1/8"},
* {"type": "ACCEPT", "address": "192.168.0.0/16"},
* {"type": "REJECT", "address": "0.0.0.0/0"}
* ]
* }
* </pre>
*
* </blockquote>
* </p>
*/
public class SemuxIpFilter {

    private static final Logger logger = LoggerFactory.getLogger(SemuxIpFilter.class);

    /**
     * The default name of ipfilter config file.
     */
    public static final String CONFIG_FILE = "ipfilter.json";

    /**
     * CopyOnWriteArrayList allows APIs to update rules atomically without affecting
     * the performance of read-only iteration
     */
    private CopyOnWriteArrayList<FilterRule> rules;

    public SemuxIpFilter(List<FilterRule> rules) {
        this.rules = new CopyOnWriteArrayList<>(rules);
    }

    public SemuxIpFilter() {
        this.rules = new CopyOnWriteArrayList<>();
    }

    /**
     * @return the live list of rules
     */
    // NOTE(review): this exposes the internal mutable list; callers appear to
    // rely on it, so it is left as-is — confirm before tightening
    public List<FilterRule> getRules() {
        return rules;
    }

    /**
     * isAcceptable method matches supplied address against defined rules
     * sequentially and returns a result based on the first matched rule's type
     *
     * @param address
     *            an address which will be matched against defined rules
     * @return whether the address is blocked or not
     */
    public boolean isAcceptable(InetSocketAddress address) {
        // FIX (idiom): replaced flatMap(Optional.of(...)) with an equivalent map;
        // an address with no matching rule is accepted by default
        return rules.stream()
                .filter(rule -> rule != null && rule.matches(address))
                .findFirst()
                .map(rule -> rule.ruleType() == IpFilterRuleType.ACCEPT)
                .orElse(true);
    }

    /**
     * Block a single IP at runtime
     *
     * @param ip
     *            The IP address to be blacklisted
     * @throws UnknownHostException
     *             if the IP cannot be parsed
     */
    public void blacklistIp(String ip) throws UnknownHostException {
        // prepend a REJECT IP rule to the rules list to ensure that the IP will be
        // blocked
        FilterRule rule = new FilterRule(ip, IpFilterRuleType.REJECT);
        rules.remove(rule); // remove duplicated rule
        rules.add(0, rule); // prepend rule
        logger.info("Blacklisted IP {}", ip);
    }

    /**
     * Whitelist a single IP at runtime
     *
     * @param ip
     *            The IP address to be whitelisted
     * @throws UnknownHostException
     *             if the IP cannot be parsed
     */
    public void whitelistIp(String ip) throws UnknownHostException {
        // prepend an ACCEPT IP rule to the rules list to ensure that the IP will be
        // accepted
        FilterRule rule = new FilterRule(ip, IpFilterRuleType.ACCEPT);
        rules.remove(rule); // remove duplicated rule
        rules.add(0, rule); // prepend rule
        logger.info("Whitelisted IP {}", ip);
    }

    /**
     * Append a rule to the rear of rules list
     *
     * @param rule
     *            The rule to be appended
     */
    public void appendRule(FilterRule rule) {
        rules.add(rule);
    }

    /**
     * Remove all rules
     */
    public void purgeRules() {
        rules.clear();
    }

    /**
     * Persist rules into target path.
     *
     * @param path
     *            the path where rules will be persisted at.
     */
    public void persist(Path path) {
        new SemuxIpFilter.Saver().save(path, this);
    }

    @JsonCreator(mode = JsonCreator.Mode.PROPERTIES)
    public static SemuxIpFilter jsonCreator(
            @JsonProperty(value = "rules", required = true) List<FilterRule> rules) {
        return new SemuxIpFilter(rules);
    }

    /**
     * Builder is an object builder of SemuxIpFilter.
     * <p>
     * <blockquote>
     *
     * <pre>
     * SemuxIpFilter ipFilter = new Builder().accept("127.0.0.1").accept("192.168.0.0/16").reject("0.0.0.0/0").build();
     * </pre>
     *
     * </blockquote>
     * </p>
     */
    public static final class Builder {

        private ArrayList<FilterRule> rules = new ArrayList<>();

        private void addRule(String cidrNotation, IpFilterRuleType type) throws UnknownHostException {
            FilterRule ipSubnetFilterRule = new FilterRule(cidrNotation, type);
            rules.add(ipSubnetFilterRule);
        }

        public Builder accept(String cidrNotation) throws UnknownHostException {
            addRule(cidrNotation, IpFilterRuleType.ACCEPT);
            return this;
        }

        public Builder reject(String cidrNotation) throws UnknownHostException {
            addRule(cidrNotation, IpFilterRuleType.REJECT);
            return this;
        }

        public List<FilterRule> getRules() {
            return rules;
        }

        public SemuxIpFilter build() {
            return new SemuxIpFilter(rules);
        }
    }

    /**
     * ${@link SemuxIpFilter.Loader} is responsible for loading ipfilter.json file
     * into an instance of SemuxIpFilter.
     */
    public static final class Loader {

        public SemuxIpFilter load(Path path) {
            try {
                // a missing config file simply means "no rules"
                if (path.toFile().exists()) {
                    return new ObjectMapper().readValue(path.toFile(),
                            SemuxIpFilter.class);
                } else {
                    return new SemuxIpFilter();
                }
            } catch (IOException e) {
                throw new IpFilterJsonParseException(String.format(
                        "Failed to parse %s. The file may be corrupted. Please either remove it or create a valid JSON file.",
                        path.toAbsolutePath()), e);
            }
        }
    }

    /**
     * ${@link SemuxIpFilter.Saver} is responsible for persisting the state of a
     * ${@link SemuxIpFilter} instance.
     */
    public static final class Saver {

        public void save(Path path, SemuxIpFilter ipFilter) {
            try {
                // NOTE(review): assumes path has a parent (absolute or nested) — confirm
                if (!path.getParent().toFile().exists()) {
                    Files.createDirectories(path.getParent());
                }
                new ObjectMapper().writer(SerializationFeature.INDENT_OUTPUT).writeValue(path.toFile(), ipFilter);
            } catch (IOException e) {
                logger.error("Failed to save ip filter", e);
            }
        }
    }
}
<file_sep>Address = Address {0}
ChangeWalletPassword = Change password of the wallet
EnterNewPassword = Please enter the new password:
ReEnterNewPassword = Please re-enter the new password:
ReEnterNewPasswordIncorrect = New password doesn't match confirmation
ShowVersion = Show the version of this client
PrintHelp = Print help info and exit
ChooseAction = action can be one of:\ncreate - Create a new account and exit\nlist - List all accounts and exit
SpecifyDataDir = Specify the data directory
SpecifyCoinbase = Specify which account to be used as coinbase
WalletPassword = Password of the wallet
PrintHexKey = Prints the hexadecimal private key of an address
ImportHexKey = Imports a hexadecimal private key into the wallet
AddressNotInWallet = This address doesn't exist in the wallet
PasswordChangedSuccessfully = Password is successfully changed
WalletFileCannotBeUpdated = The wallet file cannot be updated
PrivateKeyAlreadyInWallet = The private key is already existent in the wallet
PrivateKeyImportedSuccessfully = The private key is successfully imported
PrivateKeyCannotBeDecoded = The private key cannot be decoded: {0}
PrivateKeyIs = Your private key is: {0}
PublicKey = Public key = {0}
PrivateKey = Private key = {0}
AccountMissing = There is no account in your wallet!
ParsingFailed = Parsing failed. Reason: {0}
NewAccountCreated = A new account has been created and stored in your wallet.
NewAccountCreatedForAddress = A new account has been created for you: address = {0}
CoinbaseDoesNotExist = Coinbase does not exist
ListAccountItem = Account #{0} = {1}
SpecifyNetwork = Specify the network: mainnet, testnet or devnet
CreateNewWalletError = Unable to create a new wallet.
WrongPassword = Incorrect password<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.core;
import java.util.List;
import java.util.Map;
import org.semux.consensus.ValidatorActivatedFork;
import org.semux.core.BlockchainImpl.ValidatorStats;
import org.semux.core.state.AccountState;
import org.semux.core.state.DelegateState;
/**
 * Read/append interface to the Semux blockchain: blocks, transactions,
 * account/delegate state, validator statistics and fork activation.
 */
public interface Blockchain {

    /**
     * Returns the latest block.
     *
     * @return the block at the head of the chain
     */
    Block getLatestBlock();

    /**
     * Returns the hash of the latest block.
     *
     * @return the hash of the block at the head of the chain
     */
    byte[] getLatestBlockHash();

    /**
     * Returns the number of the latest block.
     *
     * @return the height of the chain head
     */
    long getLatestBlockNumber();

    /**
     * Returns block number by hash.
     *
     * @param hash
     *            the block hash
     * @return the number of the block with that hash
     */
    long getBlockNumber(byte[] hash);

    /**
     * Returns genesis block.
     *
     * @return the genesis block of this chain
     */
    Genesis getGenesis();

    /**
     * Returns block by number.
     *
     * @param number
     *            the block height
     * @return the block at that height
     */
    Block getBlock(long number);

    /**
     * Returns block by its hash.
     *
     * @param hash
     *            the block hash
     * @return the block with that hash
     */
    Block getBlock(byte[] hash);

    /**
     * Returns block header by block number.
     *
     * @param number
     *            the block height
     * @return the header of the block at that height
     */
    BlockHeader getBlockHeader(long number);

    /**
     * Returns block header by block hash.
     *
     * @param hash
     *            the block hash
     * @return the header of the block with that hash
     */
    BlockHeader getBlockHeader(byte[] hash);

    /**
     * Returns whether the block is existing.
     *
     * @param number
     *            the block height
     * @return true if a block exists at that height
     */
    boolean hasBlock(long number);

    /**
     * Returns transaction by its hash.
     *
     * @param hash
     *            the transaction hash
     * @return the transaction with that hash
     */
    Transaction getTransaction(byte[] hash);

    /**
     * Returns whether the transaction is in the blockchain.
     *
     * @param hash
     *            the transaction hash
     * @return true if the transaction is part of the chain
     */
    boolean hasTransaction(byte[] hash);

    /**
     * Returns transaction result.
     *
     * @param hash
     *            the transaction hash
     * @return the execution result of that transaction
     */
    TransactionResult getTransactionResult(byte[] hash);

    /**
     * Returns the block number of the given transaction.
     *
     * @param hash
     *            the transaction hash
     * @return the height of the block containing the transaction
     */
    long getTransactionBlockNumber(byte[] hash);

    /**
     * Returns the total number of transactions from/to the given address.
     *
     * @param address
     *            account address
     * @return the number of transactions involving the address
     */
    int getTransactionCount(byte[] address);

    /**
     * Returns transactions from/to an address.
     *
     * @param address
     *            account address
     * @param from
     *            transaction index from
     * @param to
     *            transaction index to
     * @return the transactions in the given index range
     */
    List<Transaction> getTransactions(byte[] address, int from, int to);

    /**
     * Add a block to the chain.
     *
     * @param block
     *            the block to append
     */
    void addBlock(Block block);

    /**
     * Returns account state.
     *
     * @return the current account state
     */
    AccountState getAccountState();

    /**
     * Returns delegate state.
     *
     * @return the current delegate state
     */
    DelegateState getDelegateState();

    /**
     * Returns the validator set based on current state.
     *
     * @return the peerIds of validators
     */
    List<String> getValidators();

    /**
     * Returns the statistics of a validator.
     *
     * @param address
     *            the validator's address
     * @return the statistics tracked for that validator
     */
    ValidatorStats getValidatorStats(byte[] address);

    /**
     * Get currently activated forks.
     *
     * @return a map from each activated fork to its activation details
     */
    Map<ValidatorActivatedFork, ValidatorActivatedFork.Activation> getActivatedForks();

    /**
     * Register a blockchain listener.
     *
     * @param listener
     *            the listener to be notified of new blocks
     */
    void addListener(BlockchainListener listener);

    /**
     * Checks whether a fork is activated at a certain blockchain height.
     *
     * @param number
     *            The number of blockchain height to check.
     * @param fork
     *            An instance of ${@link ValidatorActivatedFork} to check.
     * @return true if the fork is activated at the given height
     */
    boolean forkActivated(long number, ValidatorActivatedFork fork);
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.crypto;
import static org.junit.Assert.assertEquals;
import org.junit.Test;
import org.semux.util.Bytes;
public class CryptoExceptionTest {

    /** Decrypting with empty key/IV material must raise a CryptoException. */
    @Test(expected = CryptoException.class)
    public void testCryptoException() throws CryptoException {
        Aes.decrypt(Bytes.EMPTY_BYTES, Bytes.EMPTY_BYTES, Bytes.EMPTY_BYTES);
    }

    /** The (message, cause) constructor must preserve both arguments. */
    @Test
    public void testConstructor() {
        Throwable cause = new Throwable();
        String message = "test";

        CryptoException exception = new CryptoException(message, cause);

        assertEquals(message, exception.getMessage());
        assertEquals(cause, exception.getCause());
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.net.msg.consensus;
import static org.assertj.core.api.Assertions.assertThat;
import java.util.Collections;
import org.junit.Test;
import org.semux.core.BlockHeader;
import org.semux.crypto.Key;
import org.semux.util.Bytes;
import org.semux.util.MerkleUtil;
public class BlockHeaderMessageTest {

    /**
     * Encoding a header message and decoding the bytes back should yield a
     * header identical to the original, field by field.
     */
    @Test
    public void testSerialization() {
        // build a minimal header over an empty block
        long blockNumber = 1;
        long now = System.currentTimeMillis();
        byte[] miner = Bytes.random(Key.ADDRESS_LEN);
        byte[] parentHash = Bytes.random(32);
        byte[] txRoot = MerkleUtil.computeTransactionsRoot(Collections.emptyList());
        byte[] resRoot = MerkleUtil.computeResultsRoot(Collections.emptyList());
        byte[] state = Bytes.EMPTY_HASH;
        byte[] extraData = {};
        BlockHeader header = new BlockHeader(blockNumber, miner, parentHash, now, txRoot, resRoot,
                state, extraData);

        // round-trip through the wire encoding
        BlockHeaderMessage decoded = new BlockHeaderMessage(new BlockHeaderMessage(header).getEncoded());
        assertThat(decoded.getHeader()).isEqualToComparingFieldByField(header);
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.message;
import static org.junit.Assert.assertNotNull;
import java.util.MissingResourceException;
import org.junit.Test;
public class CLIMessageTest {

    /**
     * A known resource key should resolve to a non-null message.
     */
    @Test
    public void testExists() {
        assertNotNull(CliMessages.get("Address"));
    }

    /**
     * An unknown resource key should raise {@link MissingResourceException}.
     *
     * The lookup itself is expected to throw, so no assertion on the return
     * value is needed (the previous assertNotNull wrapper was misleading: it
     * could never be meaningfully evaluated).
     */
    @Test(expected = MissingResourceException.class)
    public void testNotExists() {
        CliMessages.get("NotExist");
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.consensus;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.semux.core.Amount.Unit.NANO_SEM;
import java.util.Collections;
import java.util.List;
import org.junit.Test;
import org.semux.Network;
import org.semux.core.Amount;
import org.semux.core.Block;
import org.semux.core.BlockHeader;
import org.semux.core.Transaction;
import org.semux.core.TransactionResult;
import org.semux.core.TransactionType;
import org.semux.crypto.Key;
import org.semux.util.Bytes;
import org.semux.util.MerkleUtil;
public class ProposalTest {
@Test
public void testBasics() {
Network network = Network.DEVNET;
TransactionType type = TransactionType.TRANSFER;
byte[] to = Bytes.random(20);
Amount value = NANO_SEM.of(2);
Amount fee = NANO_SEM.of(50_000_000L);
long nonce = 1;
long timestamp = System.currentTimeMillis();
byte[] data = Bytes.of("data");
Transaction tx = new Transaction(network, type, to, value, fee, nonce, timestamp, data);
tx.sign(new Key());
TransactionResult res = new TransactionResult(true);
long height = Long.MAX_VALUE;
int view = Integer.MAX_VALUE;
Block block = createBlock(height, Collections.singletonList(tx), Collections.singletonList(res));
Vote vote = Vote.newReject(VoteType.VALIDATE, height, view - 1);
vote.sign(new Key());
Proof proof = new Proof(height, view, Collections.singletonList(vote));
Proposal p = new Proposal(proof, block.getHeader(), block.getTransactions());
Key key = new Key();
p.sign(key);
assertThat(p.getTransactions(), contains(tx));
assertThat(p.getSignature().getAddress(), equalTo(key.toAddress()));
}
@Test
public void testProposal() {
long height = Long.MAX_VALUE;
int view = Integer.MAX_VALUE;
Block block = createBlock(height, Collections.emptyList(), Collections.emptyList());
Vote vote = Vote.newReject(VoteType.VALIDATE, height, view - 1);
vote.sign(new Key());
Proof proof = new Proof(height, view, Collections.singletonList(vote));
Proposal p = new Proposal(proof, block.getHeader(), block.getTransactions());
assertFalse(p.validate());
p.sign(new Key());
assertTrue(p.validate());
assertTrue(!p.toString().startsWith("java.lang.Object"));
Proposal p2 = Proposal.fromBytes(p.toBytes());
assertEquals(height, p2.getHeight());
assertEquals(view, p2.getView());
assertArrayEquals(block.getHash(), p2.getBlockHeader().getHash());
assertEquals(1, p2.getProof().getVotes().size());
assertArrayEquals(vote.getBlockHash(), p2.getProof().getVotes().get(0).getBlockHash());
}
private Block createBlock(long number, List<Transaction> txs, List<TransactionResult> res) {
Key key = new Key();
byte[] coinbase = key.toAddress();
byte[] prevHash = Bytes.EMPTY_HASH;
long timestamp = System.currentTimeMillis();
byte[] transactionsRoot = MerkleUtil.computeTransactionsRoot(txs);
byte[] resultsRoot = MerkleUtil.computeResultsRoot(res);
byte[] stateRoot = Bytes.EMPTY_HASH;
byte[] data = {};
BlockHeader header = new BlockHeader(number, coinbase, prevHash, timestamp, transactionsRoot, resultsRoot,
stateRoot, data);
return new Block(header, txs, res);
}
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.net.msg.p2p;
import org.semux.net.msg.Message;
import org.semux.net.msg.MessageCode;
import org.semux.util.Bytes;
// NOTE: GetNodesMessage is encoded into a single empty frame.
public class GetNodesMessage extends Message {

    /**
     * Create a GET_NODES message. The body is intentionally empty; the message
     * code alone carries the request. NodesMessage.class is passed to the
     * superclass — presumably the expected response type; confirm against
     * {@code Message}.
     */
    public GetNodesMessage() {
        super(MessageCode.GET_NODES, NodesMessage.class);
        this.encoded = Bytes.EMPTY_BYTES;
    }

    /**
     * Parse a GET_NODES message from byte array.
     *
     * @param encoded
     *            the raw message body (expected to be empty, see NOTE above)
     */
    public GetNodesMessage(byte[] encoded) {
        super(MessageCode.GET_NODES, NodesMessage.class);
        this.encoded = encoded;
    }

    @Override
    public String toString() {
        return "GetNodesMessage";
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.gui;
/**
 * Enumeration of user-interface actions for the wallet GUI.
 *
 * NOTE(review): do not reorder constants without checking callers — ordinal- or
 * serialization-based use elsewhere cannot be ruled out from this file alone.
 */
public enum Action {
    CREATE_ACCOUNT,
    RECOVER_ACCOUNTS,
    OK,
    CANCEL,
    SHOW_HOME,
    SHOW_SEND,
    SHOW_RECEIVE,
    SHOW_TRANSACTIONS,
    SHOW_DELEGATES,
    LOCK,
    SEND,
    CLEAR,
    COPY_ADDRESS,
    NEW_ACCOUNT,
    DELETE_ACCOUNT,
    VOTE,
    UNVOTE,
    DELEGATE,
    REFRESH,
    SELECT_ACCOUNT,
    SELECT_DELEGATE,
    BACKUP_WALLET,
    CHANGE_PASSWORD,
    EXIT,
    ABOUT,
    CONSOLE,
    HELP,
    SHOW_ADDRESS_BOOK,
    ADD_ADDRESS,
    EDIT_ADDRESS,
    DELETE_ADDRESS,
    IMPORT_PRIVATE_KEY,
    EXPORT_PRIVATE_KEY,
    COPY_PRIVATE_KEY
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.integration;
import static org.powermock.api.mockito.PowerMockito.mock;
import static org.powermock.reflect.Whitebox.setInternalState;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.semux.IntegrationTest;
import org.semux.config.Config;
import org.semux.consensus.ValidatorActivatedFork;
import org.semux.core.Genesis;
import org.semux.net.NodeManager;
/**
 * The test ensures that a client that disabled the fork can still accept blocks
 * from validators who activated the fork.
 */
@Category(IntegrationTest.class)
@RunWith(PowerMockRunner.class)
@PrepareForTest({ Genesis.class, NodeManager.class, ValidatorActivatedFork.class })
public class UniformDistributionForkSyncingCompatibilityTest extends SyncingTest {

    @Override
    protected int targetHeight() {
        // it needs more blocks to cover the cases that validators disagree with each
        // other
        return 5;
    }

    @Override
    public void beforeStart() {
        super.beforeStart();

        // forcibly activate the fork: replace the UNIFORM_DISTRIBUTION singleton with
        // a mock whose activation thresholds are zero — presumably making it active
        // from the first block; field names must match ValidatorActivatedFork exactly
        ValidatorActivatedFork fork = mock(ValidatorActivatedFork.class);
        setInternalState(fork, "number", (short) 1);
        setInternalState(fork, "name", "UNIFORM_DISTRIBUTION");
        setInternalState(fork, "activationBlocks", 0);
        setInternalState(fork, "activationBlocksLookup", 0);
        setInternalState(ValidatorActivatedFork.class, "UNIFORM_DISTRIBUTION", fork);

        // disable the fork on kernel3 (validator)
        Config config3 = kernelRule3.getKernel().getConfig();
        setInternalState(config3, "forkUniformDistributionEnabled", false);
        kernelRule3.getKernel().setConfig(config3);

        // disable the fork on kernel4 (client)
        Config config4 = kernelRule4.getKernel().getConfig();
        setInternalState(config4, "forkUniformDistributionEnabled", false);
        kernelRule4.getKernel().setConfig(config4);
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.gui.panel;
import static org.junit.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;
import static org.semux.core.Amount.Unit.MILLI_SEM;
import static org.semux.core.Amount.Unit.NANO_SEM;
import static org.semux.core.Amount.Unit.SEM;
import java.util.Collections;
import org.assertj.swing.edt.GuiActionRunner;
import org.assertj.swing.fixture.FrameFixture;
import org.assertj.swing.junit.testcase.AssertJSwingJUnitTestCase;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import org.semux.KernelMock;
import org.semux.core.Amount;
import org.semux.core.Blockchain;
import org.semux.core.PendingManager;
import org.semux.core.Transaction;
import org.semux.core.TransactionType;
import org.semux.core.state.Account;
import org.semux.core.state.DelegateState;
import org.semux.crypto.Key;
import org.semux.gui.model.WalletAccount;
import org.semux.gui.model.WalletModel;
import org.semux.message.GuiMessages;
import org.semux.rules.KernelRule;
import org.semux.util.Bytes;
@RunWith(MockitoJUnitRunner.class)
public class TransactionsPanelTest extends AssertJSwingJUnitTestCase {

    // KernelRule(51610, 51710): presumably p2p and API ports — confirm in KernelRule
    @Rule
    public KernelRule kernelRule = new KernelRule(51610, 51710);

    @Mock
    WalletModel walletModel;

    TransactionsPanelTestApplication application;
    FrameFixture window;

    @Override
    protected void onSetUp() {
        // intentionally empty: all fixtures are built inside testTransactions()
    }

    // Shows a single TRANSFER transaction in the panel's table and verifies that
    // double-clicking the row opens the transaction detail dialog.
    @Test
    public void testTransactions() {
        // a wallet account holding one signed TRANSFER transaction
        Key key = new Key();
        Amount $1 = NANO_SEM.of(1);
        WalletAccount acc = spy(new WalletAccount(key, new Account(key.toAddress(), $1, $1, 1), null));
        Transaction tx = new Transaction(kernelRule.getKernel().getConfig().network(),
                TransactionType.TRANSFER,
                Bytes.random(Key.ADDRESS_LEN),
                SEM.of(1),
                MILLI_SEM.of(10),
                0,
                System.currentTimeMillis(),
                Bytes.EMPTY_BYTES);
        tx.sign(new Key());
        acc.setTransactions(Collections.singletonList(tx));

        // mock walletModel
        when(walletModel.getAccounts()).thenReturn(Collections.singletonList(acc));

        // mock kernel: stub the blockchain/delegate-state/pending-manager chain the
        // panel reads from
        KernelMock kernelMock = spy(kernelRule.getKernel());
        Blockchain chain = mock(Blockchain.class);
        DelegateState ds = mock(DelegateState.class);
        PendingManager pendingManager = mock(PendingManager.class);
        when(ds.getDelegateByAddress(any())).thenReturn(null);
        when(chain.getDelegateState()).thenReturn(ds);
        when(kernelMock.getBlockchain()).thenReturn(chain);
        when(kernelMock.getPendingManager()).thenReturn(pendingManager);

        // GUI construction must happen on the EDT, hence GuiActionRunner
        application = GuiActionRunner.execute(() -> new TransactionsPanelTestApplication(walletModel, kernelMock));

        window = new FrameFixture(robot(), application);
        window.show().requireVisible().moveToFront();

        assertEquals(1, window.table("transactionsTable").rowCount());
        window.table("transactionsTable").cell(TransactionType.TRANSFER.name()).doubleClick();
        window.dialog().requireVisible();
        assertEquals(GuiMessages.get("Transaction"), window.dialog().target().getTitle());
    }
}
<file_sep># Welcome to Semux!
[](https://travis-ci.org/semuxproject/semux)
[](https://ci.appveyor.com/project/semux/semux)
[](https://coveralls.io/github/semuxproject/semux)
[](https://crowdin.com/project/semux)
## What is Semux
Semux is an experimental high-performance blockchain platform that powers decentralized applications. It is written purely in Java and powered by the Semux BFT consensus algorithm.
More info can be found at our [Wiki page](https://github.com/semuxproject/semux/wiki).
## Get started
1. Download and install [Java SE Runtime Environment 8](http://www.oracle.com/technetwork/java/javase/downloads/jre8-downloads-2133155.html) or above
2. *(Windows user) Download and install [Microsoft Visual C++ Redistributable for Visual Studio 2012 Update 4](https://www.microsoft.com/en-us/download/details.aspx?id=30679)*
3. Download the [Latest Wallet Release](https://github.com/semuxproject/semux/releases) and unpack it to a desired directory.
4. Run ``semux.exe`` if you're on Windows; run ``./semux-gui.sh`` or ``./semux-cli.sh`` if you're on Linux or macOS.
## Build from source
Prerequisites:
```
Java SE Development Kit 8 or above
Apache Maven 3.5.2
```
Build:
```
git clone https://github.com/semuxproject/semux
cd semux
mvn install -DskipTests
```
Run:
```
./dist/linux/semux-cli.sh
```
## Contribute
Anyone is welcome to contribute to this open source project in the form of peer review, testing and patches. Please see the [contributing](./.github/contributing.md) guide for more details.
If you found a bug, please submit it to [issues](https://github.com/semuxproject/semux/issues).
## Wallet Localization
If you want to add a new language, review or update an existing translation, or help finish a specific translation, you can join via the following link:
https://crowdin.com/project/semux
## License
[The MIT License](./LICENSE)
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.net;
import static org.assertj.core.api.Assertions.assertThat;
import org.junit.Test;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
public class FrameTest {

    /**
     * Writing a frame header into a buffer and reading it back should produce a
     * frame identical to the original, field by field.
     */
    @Test
    public void testReadAndWrite() {
        // header with distinctive values in every field, no body
        Frame original = new Frame((short) 0x1122, (byte) 0x33, (byte) 0x44,
                0x55667788, 0x99aabbcc, 0xddeeff00, null);

        // serialize the header into a fixed-size buffer
        ByteBuf buffer = Unpooled.copiedBuffer(new byte[Frame.HEADER_SIZE]);
        buffer.writerIndex(0);
        original.writeHeader(buffer);

        // rewind and parse it back
        buffer.readerIndex(0);
        Frame decoded = Frame.readHeader(buffer);

        assertThat(decoded).isEqualToComparingFieldByFieldRecursively(original);
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.core;
import static org.semux.core.Amount.neg;
import static org.semux.core.Amount.sub;
import static org.semux.core.Amount.sum;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.semux.config.Config;
import org.semux.core.TransactionResult.Error;
import org.semux.core.state.Account;
import org.semux.core.state.AccountState;
import org.semux.core.state.DelegateState;
import org.semux.util.Bytes;
/**
* Transaction executor
*/
/**
 * Transaction executor.
 *
 * Applies transactions against the account and delegate state, producing one
 * {@link TransactionResult} per transaction.
 */
public class TransactionExecutor {

    // lookup table of bytes allowed in a delegate name
    private static final boolean[] valid = new boolean[256];
    static {
        for (byte b : Bytes.of("abcdefghijklmnopqrstuvwxyz0123456789_")) {
            valid[b & 0xff] = true;
        }
    }

    /**
     * Validate delegate name: 3 to 16 bytes, lower-case letters, digits and
     * underscore only.
     *
     * @param data
     *            the raw name bytes
     * @return whether the name is valid
     */
    public static boolean validateDelegateName(byte[] data) {
        if (data.length < 3 || data.length > 16) {
            return false;
        }

        for (byte b : data) {
            if (!valid[b & 0xff]) {
                return false;
            }
        }

        return true;
    }

    // assigned once in the constructor, never mutated
    private final Config config;

    /**
     * Creates a new transaction executor.
     *
     * @param config
     *            the chain configuration (fee minimums, data-size limits, ...)
     */
    public TransactionExecutor(Config config) {
        this.config = config;
    }

    /**
     * Execute a list of transactions.
     *
     * NOTE: transaction format and signature are assumed to be success.
     *
     * @param txs
     *            transactions
     * @param as
     *            account state
     * @param ds
     *            delegate state
     * @return one result per input transaction, in the same order
     */
    public List<TransactionResult> execute(List<Transaction> txs, AccountState as, DelegateState ds) {
        List<TransactionResult> results = new ArrayList<>();

        for (Transaction tx : txs) {
            TransactionResult result = new TransactionResult(false);
            results.add(result);

            TransactionType type = tx.getType();
            byte[] from = tx.getFrom();
            byte[] to = tx.getTo();
            Amount value = tx.getValue();
            long nonce = tx.getNonce();
            Amount fee = tx.getFee();
            byte[] data = tx.getData();

            Account acc = as.getAccount(from);
            Amount available = acc.getAvailable();
            Amount locked = acc.getLocked();

            // the nonce must match the sender's current account nonce
            if (nonce != acc.getNonce()) {
                result.setError(Error.INVALID_NONCE);
                continue;
            }

            // the fee must meet the configured minimum
            if (fee.lt(config.minTransactionFee())) {
                result.setError(Error.INVALID_FEE);
                continue;
            }

            // the data payload must fit the per-type size limit
            if (data.length > config.maxTransactionDataSize(type)) {
                result.setError(Error.INVALID_DATA_LENGTH);
                continue;
            }

            switch (type) {
            case TRANSFER: {
                if (canCover(available, value, fee)) {
                    as.adjustAvailable(from, neg(sum(value, fee)));
                    as.adjustAvailable(to, value);
                    result.setSuccess(true);
                } else {
                    result.setError(Error.INSUFFICIENT_AVAILABLE);
                }
                break;
            }
            case DELEGATE: {
                if (!validateDelegateName(data)) {
                    result.setError(Error.INVALID_DELEGATE_NAME);
                    break;
                }
                if (value.lt(config.minDelegateBurnAmount())) {
                    result.setError(Error.INVALID_DELEGATE_BURN_AMOUNT);
                    break;
                }

                if (canCover(available, value, fee)) {
                    // registration burns the value: `to` must be the empty address
                    if (Arrays.equals(Bytes.EMPTY_ADDRESS, to) && ds.register(from, data)) {
                        as.adjustAvailable(from, neg(sum(value, fee)));
                        result.setSuccess(true);
                    } else {
                        result.setError(Error.FAILED);
                    }
                } else {
                    result.setError(Error.INSUFFICIENT_AVAILABLE);
                }
                break;
            }
            case VOTE: {
                if (canCover(available, value, fee)) {
                    if (ds.vote(from, to, value)) {
                        // voted stake moves from available to locked
                        as.adjustAvailable(from, neg(sum(value, fee)));
                        as.adjustLocked(from, value);
                        result.setSuccess(true);
                    } else {
                        result.setError(Error.FAILED);
                    }
                } else {
                    result.setError(Error.INSUFFICIENT_AVAILABLE);
                }
                break;
            }
            case UNVOTE: {
                if (available.lt(fee)) {
                    result.setError(Error.INSUFFICIENT_AVAILABLE);
                    break;
                }

                if (locked.lt(value)) {
                    result.setError(Error.INSUFFICIENT_LOCKED);
                    break;
                }

                if (ds.unvote(from, to, value)) {
                    // unlocked stake returns to available, minus the fee
                    as.adjustAvailable(from, sub(value, fee));
                    as.adjustLocked(from, neg(value));
                    result.setSuccess(true);
                } else {
                    result.setError(Error.FAILED);
                }
                break;
            }
            default:
                // unsupported transaction type
                result.setError(Error.INVALID_TYPE);
                break;
            }

            // increase nonce if success
            if (result.isSuccess()) {
                as.increaseNonce(from);
            }
        }

        return results;
    }

    /**
     * Execute one transaction.
     *
     * NOTE: transaction format and signature are assumed to be success.
     *
     * @param as
     *            account state
     * @param ds
     *            delegate state
     * @return the execution result
     */
    public TransactionResult execute(Transaction tx, AccountState as, DelegateState ds) {
        return execute(Collections.singletonList(tx), as, ds).get(0);
    }

    /**
     * Checks that the available balance covers the fee, the value, and their sum.
     *
     * The three checks and their short-circuit order are identical to the inline
     * checks previously repeated in TRANSFER/DELEGATE/VOTE: the sum is only
     * computed after both components individually fit the balance.
     */
    private static boolean canCover(Amount available, Amount value, Amount fee) {
        return fee.lte(available) && value.lte(available) && sum(value, fee).lte(available);
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.core;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.semux.core.Amount.Unit.NANO_SEM;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.powermock.reflect.Whitebox;
import org.semux.Network;
import org.semux.config.Config;
import org.semux.config.Constants;
import org.semux.config.DevnetConfig;
import org.semux.consensus.ValidatorActivatedFork;
import org.semux.core.BlockchainImpl.StatsType;
import org.semux.crypto.Key;
import org.semux.rules.TemporaryDatabaseRule;
import org.semux.util.Bytes;
import org.semux.util.MerkleUtil;
/**
 * Tests of {@link BlockchainImpl} against a temporary database: block storage,
 * transaction indexing, validator statistics, and fork activation tracking.
 */
public class BlockchainImplTest {

    @Rule
    public TemporaryDatabaseRule temporaryDBFactory = new TemporaryDatabaseRule();

    private Config config;
    private BlockchainImpl chain;

    // NOTE(review): addresses elsewhere in this codebase are 20 bytes
    // (Key.ADDRESS_LEN); 30 random bytes appears to work here only because the
    // coinbase is stored, not validated — confirm before relying on it
    private byte[] coinbase = Bytes.random(30);
    private byte[] prevHash = Bytes.random(32);

    private Network network = Network.DEVNET;
    private Key key = new Key();
    private byte[] from = key.toAddress();
    private byte[] to = Bytes.random(20);
    private Amount value = NANO_SEM.of(20);
    private Amount fee = NANO_SEM.of(1);
    private long nonce = 12345;
    private byte[] data = Bytes.of("test");
    private long timestamp = System.currentTimeMillis() - 60 * 1000;
    private Transaction tx = new Transaction(network, TransactionType.TRANSFER, to, value, fee, nonce, timestamp,
            data)
                    .sign(key);
    private TransactionResult res = new TransactionResult(true);

    @Before
    public void setUp() {
        config = new DevnetConfig(Constants.DEFAULT_DATA_DIR);
        chain = new BlockchainImpl(config, temporaryDBFactory);
    }

    @Test
    public void testGetLatestBlock() {
        assertEquals(0, chain.getLatestBlockNumber());
        assertNotNull(chain.getLatestBlockHash());
        assertNotNull(chain.getLatestBlock());

        Block newBlock = createBlock(1);
        chain.addBlock(newBlock);
        assertNotEquals(0, chain.getLatestBlockNumber());
        // assertEquals over assertTrue(a == b): reports both values on failure
        assertEquals(newBlock.getNumber(), chain.getLatestBlock().getNumber());
    }

    @Test
    public void testGetLatestBlockHash() {
        Block newBlock = createBlock(1);
        chain.addBlock(newBlock);
        assertEquals(newBlock.getNumber(), chain.getLatestBlockNumber());
        assertArrayEquals(newBlock.getHash(), chain.getLatestBlockHash());
    }

    @Test
    public void testGetBlock() {
        assertEquals(0, chain.getBlock(0).getNumber());
        assertNull(chain.getBlock(1));

        long number = 1;
        Block newBlock = createBlock(number);
        chain.addBlock(newBlock);
        // lookup by number and by hash must agree
        assertEquals(number, chain.getBlock(number).getNumber());
        assertEquals(number, chain.getBlock(newBlock.getHash()).getNumber());
    }

    @Test
    public void testHasBlock() {
        assertFalse(chain.hasBlock(-1));
        assertTrue(chain.hasBlock(0));
        assertFalse(chain.hasBlock(1));
    }

    @Test
    public void testGetBlockNumber() {
        long number = 1;
        Block newBlock = createBlock(number);
        chain.addBlock(newBlock);
        assertEquals(number, chain.getBlockNumber(newBlock.getHash()));
    }

    @Test
    public void testGetGenesis() {
        assertArrayEquals(Genesis.load(network).getHash(), chain.getGenesis().getHash());
    }

    @Test
    public void testGetBlockHeader() {
        assertArrayEquals(Genesis.load(network).getHash(), chain.getBlockHeader(0).getHash());

        long number = 1;
        Block newBlock = createBlock(number);
        chain.addBlock(newBlock);
        assertArrayEquals(newBlock.getHash(), chain.getBlockHeader(1).getHash());
        assertEquals(newBlock.getNumber(), chain.getBlockHeader(newBlock.getHash()).getNumber());
    }

    @Test
    public void testGetTransaction() {
        assertNull(chain.getTransaction(tx.getHash()));

        Block newBlock = createBlock(1);
        chain.addBlock(newBlock);

        Transaction t = chain.getTransaction(tx.getHash());
        assertNotNull(t);
        // assertArrayEquals/assertEquals over assertTrue(...) for clearer failures
        assertArrayEquals(from, t.getFrom());
        assertArrayEquals(to, t.getTo());
        assertArrayEquals(data, t.getData());
        assertEquals(value, t.getValue());
        assertEquals(nonce, t.getNonce());
        assertEquals(timestamp, t.getTimestamp());
    }

    @Test
    public void testHasTransaction() {
        assertFalse(chain.hasTransaction(tx.getHash()));

        Block newBlock = createBlock(1);
        chain.addBlock(newBlock);
        assertTrue(chain.hasTransaction(tx.getHash()));
    }

    @Test
    public void testGetTransactionResult() {
        assertNull(chain.getTransaction(tx.getHash()));

        Block newBlock = createBlock(1);
        chain.addBlock(newBlock);

        TransactionResult r = chain.getTransactionResult(tx.getHash());
        assertArrayEquals(res.toBytes(), r.toBytes());
    }

    @Test
    public void testGetTransactionBlockNumber() {
        Block newBlock = createBlock(1);
        chain.addBlock(newBlock);
        assertEquals(newBlock.getNumber(), chain.getTransactionBlockNumber(tx.getHash()));
    }

    @Test
    public void testGetCoinbaseTransactionBlockNumber() {
        for (int i = 1; i <= 10; i++) {
            byte[] coinbase = new Key().toAddress();
            Block newBlock = createBlock(i, coinbase, Bytes.EMPTY_BYTES, Collections.emptyList(),
                    Collections.emptyList());
            chain.addBlock(newBlock);
            // each block generates an implicit COINBASE transaction for its coinbase
            List<Transaction> transactions = chain.getTransactions(coinbase, 0, 1);
            assertEquals(1, transactions.size());
            assertEquals(newBlock.getNumber(), transactions.get(0).getNonce());
            assertEquals(TransactionType.COINBASE, transactions.get(0).getType());
            assertEquals(newBlock.getNumber(), chain.getTransactionBlockNumber(transactions.get(0).getHash()));
        }
    }

    @Test
    public void testGetTransactionCount() {
        assertNull(chain.getTransaction(tx.getHash()));

        Block newBlock = createBlock(1);
        chain.addBlock(newBlock);
        assertEquals(1, chain.getTransactionCount(tx.getFrom()));
    }

    @Test
    public void testGetAccountTransactions() {
        assertNull(chain.getTransaction(tx.getHash()));

        Block newBlock = createBlock(1);
        chain.addBlock(newBlock);
        List<Transaction> txs = chain.getTransactions(tx.getFrom(), 0, 100);
        assertEquals(1, txs.size());
        assertArrayEquals(tx.toBytes(), txs.get(0).toBytes());
    }

    @Test
    public void testSerialization() {
        Block block1 = createBlock(1);

        Block block2 = Block.fromBytes(block1.toBytesHeader(), block1.toBytesTransactions(), block1.toBytesResults(),
                block1.toBytesVotes());
        assertArrayEquals(block1.getHash(), block2.getHash());
        assertArrayEquals(block1.getCoinbase(), block2.getCoinbase());
        assertArrayEquals(block1.getParentHash(), block2.getParentHash());
        assertEquals(block1.getNumber(), block2.getNumber());

        assertEquals(block1.getTransactions().size(), block2.getTransactions().size());
    }

    @Test
    public void testGetTransactions() {
        Block block = createBlock(1);
        chain.addBlock(block);

        // the transaction is indexed under both the sender and the recipient
        List<Transaction> list = chain.getTransactions(from, 0, 1024);
        assertEquals(1, list.size());
        assertArrayEquals(tx.getHash(), list.get(0).getHash());

        list = chain.getTransactions(to, 0, 1024);
        assertEquals(1, list.size());
        assertArrayEquals(tx.getHash(), list.get(0).getHash());
    }

    @Test
    public void testGetTransactionsSelfTx() {
        Transaction selfTx = new Transaction(network, TransactionType.TRANSFER, key.toAddress(), value, fee, nonce,
                timestamp, data).sign(key);
        Block block = createBlock(
                1,
                Collections.singletonList(selfTx),
                Collections.singletonList(res));
        chain.addBlock(block);

        // there should be only 1 transaction added into index database
        List<Transaction> list = chain.getTransactions(key.toAddress(), 0, 1024);
        assertEquals(1, list.size());
        assertArrayEquals(selfTx.getHash(), list.get(0).getHash());
    }

    @Test
    public void testValidatorStates() {
        byte[] address = Bytes.random(20);
        assertEquals(0, chain.getValidatorStats(address).getBlocksForged());
        assertEquals(0, chain.getValidatorStats(address).getTurnsHit());
        assertEquals(0, chain.getValidatorStats(address).getTurnsMissed());

        chain.adjustValidatorStats(address, StatsType.FORGED, 1);
        assertEquals(1, chain.getValidatorStats(address).getBlocksForged());

        chain.adjustValidatorStats(address, StatsType.HIT, 1);
        assertEquals(1, chain.getValidatorStats(address).getTurnsHit());

        chain.adjustValidatorStats(address, StatsType.MISSED, 1);
        assertEquals(1, chain.getValidatorStats(address).getTurnsMissed());
        chain.adjustValidatorStats(address, StatsType.MISSED, 1);
        assertEquals(2, chain.getValidatorStats(address).getTurnsMissed());
    }

    @Test
    public void testForkActivated() {
        final ValidatorActivatedFork fork = ValidatorActivatedFork.UNIFORM_DISTRIBUTION;
        // add signalling blocks one by one and check the activation boundary after
        // each addition
        for (long i = 1; i <= fork.activationBlocksLookup; i++) {
            chain.addBlock(
                    createBlock(i, coinbase, BlockHeaderData.v1(new BlockHeaderData.ForkSignalSet(fork)).toBytes(),
                            Collections.singletonList(tx), Collections.singletonList(res)));
            if (i <= fork.activationBlocks) {
                for (long j = 0; j <= i; j++) {
                    assertFalse(chain.forkActivated(i, fork));
                }
            } else {
                for (long j = i; j > fork.activationBlocks; j--) {
                    assertTrue(chain.forkActivated(j, fork));
                }
                for (long j = fork.activationBlocks; j >= 0; j--) {
                    assertFalse(chain.forkActivated(j, fork));
                }
            }
        }

        for (long i = 0; i <= fork.activationBlocks; i++) {
            assertFalse(chain.forkActivated(i, fork));
        }
        for (long i = fork.activationBlocks + 1; i <= fork.activationBlocksLookup; i++) {
            assertTrue(chain.forkActivated(i, fork));
        }
    }

    @Test
    public void testForkCompatibility() {
        ValidatorActivatedFork fork = ValidatorActivatedFork.UNIFORM_DISTRIBUTION;
        Block block = createBlock(1, coinbase, BlockHeaderData.v1(new BlockHeaderData.ForkSignalSet(fork)).toBytes(),
                Collections.singletonList(tx), Collections.singletonList(res));

        // a chain with the fork disabled must still accept a signalling block
        Whitebox.setInternalState(config, "forkUniformDistributionEnabled", false);
        chain = new BlockchainImpl(config, temporaryDBFactory);
        chain.addBlock(block);
    }

    private Block createBlock(long number) {
        return createBlock(number, Collections.singletonList(tx), Collections.singletonList(res));
    }

    private Block createBlock(long number, List<Transaction> transactions, List<TransactionResult> results) {
        return createBlock(number, coinbase, Bytes.EMPTY_BYTES, transactions, results);
    }

    private Block createBlock(long number, byte[] coinbase, byte[] data, List<Transaction> transactions,
            List<TransactionResult> results) {
        byte[] transactionsRoot = MerkleUtil.computeTransactionsRoot(transactions);
        byte[] resultsRoot = MerkleUtil.computeResultsRoot(results);
        byte[] stateRoot = Bytes.EMPTY_HASH;
        long timestamp = System.currentTimeMillis();

        BlockHeader header = new BlockHeader(number, coinbase, prevHash, timestamp, transactionsRoot, resultsRoot,
                stateRoot, data);
        return new Block(header, transactions, results);
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.api;
import static java.lang.String.format;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;
import org.semux.crypto.Hex;
import org.semux.rules.KernelRule;
import org.semux.util.Bytes;
/**
* The test case covers validation rules of ApiHandlerImpl
*/
@RunWith(Parameterized.class)
public class ApiHandlerErrorTest extends ApiHandlerTestBase {
private static final String ADDRESS_PLACEHOLDER = "[wallet]";
@Rule
public KernelRule kernelRule = new KernelRule(51610, 51710);
/**
 * Each entry is a single request URI that the API must reject with
 * {@code success == false}. The entries cover missing parameters, malformed
 * values (non-hex, non-numeric, out-of-range) and well-formed values that
 * reference unknown entities.
 */
@Parameters(name = "request(\"{0}\")")
public static Collection<Object[]> data() {
    return Arrays.asList(new Object[][] {
            { "/add_node" },
            { "/add_node?node=I_am_not_a_node" },
            { "/add_node?node=127.0.0.1:65536" },
            // { "/add_node?node=.com:5161" },
            { "/add_to_blacklist" },
            { "/add_to_blacklist?ip=I_am_not_an_IP" },
            { "/add_to_whitelist" },
            { "/add_to_whitelist?ip=I_am_not_an_IP" },
            { "/get_block" },
            { "/get_block?number=9999999999999999" },
            { "/get_account" },
            { "/get_account?address=0xabc" },
            { "/get_account?address=I_am_not_an_address" },
            { "/get_delegate" },
            { "/get_delegate?address=" + Hex.encode(Bytes.random(20)) },
            { "/get_delegate?address=I_am_not_an_address" },
            { "/get_account_transactions" },
            { "/get_account_transactions?address=I_am_not_an_address" },
            { format("/get_account_transactions?address=%s", randomHex()) },
            { format("/get_account_transactions?address=%s&from=%s", randomHex(), "I_am_not_a_number") },
            { format("/get_account_transactions?address=%s&from=%s&to=%s", randomHex(), "0", "I_am_not_a_number") },
            { "/get_transaction" },
            { format("/get_transaction?hash=%s", "I_am_not_a_hexadecimal_string") },
            { format("/get_transaction?hash=%s", randomHex()) },
            { "/send_transaction" },
            { "/send_transaction?raw=I_am_not_a_hexadecimal_string" },
            { "/get_vote" },
            { format("/get_vote?voter=%s", "I_am_not_a_valid_address") },
            { format("/get_vote?voter=%s", randomHex()) },
            { format("/get_vote?voter=%s&delegate=%s", randomHex(), "I_am_not_a_valid_address") },
            { "/get_votes" },
            { "/get_votes?delegate=I_am_not_hexadecimal_string" },
            { "/transfer" },
            { format("/transfer?from=%s", "_") }, // non-hexadecimal address
            { format("/transfer?from=%s", randomHex()) }, // non wallet address
            { format("/transfer?from=%s", ADDRESS_PLACEHOLDER) },
            { format("/transfer?from=%s&to=%s", ADDRESS_PLACEHOLDER, "_") }, // non-hexadecimal to
            { format("/transfer?from=%s&to=%s", ADDRESS_PLACEHOLDER, randomHex()) },
            { format("/transfer?from=%s&to=%s&value=%s", ADDRESS_PLACEHOLDER, randomHex(), "_") }, // non-number
            { format("/transfer?from=%s&to=%s&value=%s", ADDRESS_PLACEHOLDER, randomHex(), "10") },
            { format("/transfer?from=%s&to=%s&value=%s&fee=%s", ADDRESS_PLACEHOLDER, randomHex(), "10", "_") }, // non-number
            { format("/transfer?from=%s&to=%s&value=%s&fee=%s", ADDRESS_PLACEHOLDER, randomHex(), "10", "10") },
            { format("/transfer?from=%s&to=%s&value=%s&fee=%s&data=%s", ADDRESS_PLACEHOLDER, randomHex(), "10",
                    "10", "_") }, // non-hexadecimal data
            { format("/transfer?from=%s&to=%s&value=%s&fee=%s&data=%s", ADDRESS_PLACEHOLDER, randomHex(), "10",
                    "10", randomHex()) }, // hexadecimal data
            { "/get_transaction_limits" },
            { "/get_transaction_limits?type=XXX" },
    });
}
// The URI under test, injected by the Parameterized runner from data().
@Parameter
public String uri;
/** Starts an in-process API server backed by the rule's kernel. */
@Before
public void setUp() {
    SemuxApiMock mock = new SemuxApiMock(kernelRule.getKernel());
    mock.start();

    api = mock;
    wallet = mock.getKernel().getWallet();
    config = mock.getKernel().getConfig();
}
@After
public void tearDown() {
    // Stop the API server so the next parameterized run can rebind the ports.
    api.stop();
}
/**
 * Sends the parameterized request and asserts that the API rejects it: the
 * response must carry {@code success == false} and a non-null error message.
 *
 * @throws IOException if the HTTP request fails
 */
@Test
public void testError() throws IOException {
    // Substitute the placeholder with a real wallet address from the running kernel.
    uri = uri.replace(ADDRESS_PLACEHOLDER, wallet.getAccount(0).toAddressString());

    ApiHandlerResponse response = request(uri, ApiHandlerResponse.class);
    assertFalse(response.success);
    // Removed the debug System.out.println(response.message): unit tests
    // should not write to stdout, and assertNotNull already guards the message.
    assertNotNull(response.message);
}
/** Returns a random 20-byte (address-sized) value as a 0x-prefixed hex string. */
private static String randomHex() {
    byte[] raw = Bytes.random(20);
    return Hex.encode0x(raw);
}
}<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.net.msg.consensus;
import org.semux.core.BlockHeader;
import org.semux.net.msg.Message;
import org.semux.net.msg.MessageCode;
import org.semux.util.SimpleDecoder;
import org.semux.util.SimpleEncoder;
/**
 * A network message that carries a single block header.
 */
public class BlockHeaderMessage extends Message {

    private BlockHeader header;

    /**
     * Creates a message from a block header (encoding side).
     *
     * @param header the header to transmit
     */
    public BlockHeaderMessage(BlockHeader header) {
        super(MessageCode.GET_BLOCK_HEADER, null);

        this.header = header;

        SimpleEncoder enc = new SimpleEncoder();
        enc.writeBytes(header.toBytes());
        this.encoded = enc.toBytes();
    }

    /**
     * Parses a message from its wire encoding (decoding side).
     *
     * @param encoded the raw message body
     */
    public BlockHeaderMessage(byte[] encoded) {
        // FIX: was MessageCode.GET_BLOCK, which disagreed with the encoding
        // constructor above — both constructors of the same message type must
        // report the same code, or a round-tripped message changes identity.
        // NOTE(review): if MessageCode defines a dedicated response code
        // (e.g. BLOCK_HEADER), that may be the truly intended value for BOTH
        // constructors — confirm against the MessageCode enum.
        super(MessageCode.GET_BLOCK_HEADER, null);

        this.encoded = encoded;

        SimpleDecoder dec = new SimpleDecoder(encoded);
        byte[] bytes = dec.readBytes();
        this.header = BlockHeader.fromBytes(bytes);
    }

    public BlockHeader getHeader() {
        return header;
    }

    @Override
    public String toString() {
        return "BlockHeaderMessage [header=" + header + "]";
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.util;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import org.junit.Test;
import org.semux.crypto.Hex;
/**
 * Tests value semantics of {@code ByteArray}: map-key equality, hex
 * {@code toString}, and the Jackson map-key deserializer.
 */
public class ByteArrayTest {

    @Test
    public void testInHashMap() {
        // Two arrays with equal content must behave as the same map key,
        // while different content must not collide.
        byte[] original = Bytes.random(20);
        byte[] unrelated = Bytes.random(20);
        byte[] sameContent = Arrays.copyOf(original, original.length);

        HashMap<ByteArray, Boolean> map = new HashMap<>();
        map.put(ByteArray.of(original), true);

        assertFalse(map.containsKey(ByteArray.of(unrelated)));
        assertTrue(map.containsKey(ByteArray.of(sameContent)));
    }

    @Test
    public void testToString() {
        // toString is defined as the plain (un-prefixed) hex encoding.
        byte[] raw = Bytes.random(20);
        assertEquals(Hex.encode(raw), ByteArray.of(raw).toString());
    }

    @Test
    public void testByteArrayKeyDeserializer() throws IOException {
        // The map-key deserializer must invert Hex.encode0x.
        byte[] raw = Bytes.random(3);
        Object restored = new ByteArray.ByteArrayKeyDeserializer().deserializeKey(Hex.encode0x(raw), null);
        assertTrue(restored instanceof ByteArray);
        assertThat(ByteArray.of(raw), equalTo(restored));
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.core;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import org.junit.Test;
import org.semux.config.Constants;
import org.semux.util.Bytes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Tests {@code BlockHeader} construction, wire-format round-tripping, and
 * reports the serialized header size.
 */
public class BlockHeaderTest {

    private static final Logger logger = LoggerFactory.getLogger(BlockHeaderTest.class);

    // Fixture values shared by all tests.
    private long number = 1;
    private byte[] coinbase = Bytes.random(20);
    private byte[] prevHash = Bytes.random(32);
    private long timestamp = System.currentTimeMillis();
    private byte[] transactionsRoot = Bytes.random(32);
    private byte[] resultsRoot = Bytes.random(32);
    private byte[] stateRoot = Bytes.random(32);
    private byte[] data = Bytes.of("data");

    // Captured by each test before verification, so testFields can compare it.
    private byte[] hash;

    /** Builds a header from the fixture fields above. */
    private BlockHeader newHeader() {
        return new BlockHeader(number, coinbase, prevHash, timestamp, transactionsRoot, resultsRoot,
                stateRoot, data);
    }

    @Test
    public void testNew() {
        BlockHeader header = newHeader();
        hash = header.getHash();

        testFields(header);
    }

    @Test
    public void testSerialization() {
        BlockHeader header = newHeader();
        hash = header.getHash();

        // Round-trip through the wire format and verify nothing is lost.
        testFields(BlockHeader.fromBytes(header.toBytes()));
    }

    @Test
    public void testBlockHeaderSize() {
        byte[] bytes = newHeader().toBytes();

        logger.info("block header size: {}", bytes.length);
        logger.info("block header size (1y): {} GB",
                1.0 * bytes.length * Constants.BLOCKS_PER_YEAR / 1024 / 1024 / 1024);
    }

    /** Asserts that every field of the given header matches the fixture. */
    private void testFields(BlockHeader header) {
        assertArrayEquals(hash, header.getHash());
        assertEquals(number, header.getNumber());
        assertArrayEquals(coinbase, header.getCoinbase());
        assertArrayEquals(prevHash, header.getParentHash());
        assertEquals(timestamp, header.getTimestamp());
        assertArrayEquals(transactionsRoot, header.getTransactionsRoot());
        assertArrayEquals(resultsRoot, header.getResultsRoot());
        assertArrayEquals(stateRoot, header.getStateRoot());
        assertArrayEquals(data, header.getData());
    }
}
<file_sep># Additional JVM options can be added here
# -Xms512m
# -Xmx2048m
-Dlog4j2.garbagefreeThreadContextMap=true
-Dlog4j2.shutdownHookEnabled=false
-Dlog4j2.disableJmx=true<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.gui.panel;
import static org.semux.core.Amount.sum;
import java.awt.Color;
import java.awt.GridLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.swing.BoxLayout;
import javax.swing.GroupLayout;
import javax.swing.GroupLayout.Alignment;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.LayoutStyle.ComponentPlacement;
import javax.swing.SwingConstants;
import javax.swing.border.CompoundBorder;
import javax.swing.border.EmptyBorder;
import javax.swing.border.EtchedBorder;
import javax.swing.border.TitledBorder;
import org.semux.core.Block;
import org.semux.core.Transaction;
import org.semux.core.TransactionType;
import org.semux.crypto.Hex;
import org.semux.gui.Action;
import org.semux.gui.SemuxGui;
import org.semux.gui.SwingUtil;
import org.semux.gui.model.WalletAccount;
import org.semux.gui.model.WalletModel;
import org.semux.gui.model.WalletModel.Status;
import org.semux.message.GuiMessages;
import org.semux.util.ByteArray;
import org.semux.util.exception.UnreachableException;
public class HomePanel extends JPanel implements ActionListener {

    private static final long serialVersionUID = 1L;

    // Maximum number of recent transactions shown in the panel.
    private static final int NUMBER_OF_TRANSACTIONS = 5;

    // Only these transaction types are merged into the federated list in refresh().
    private static final EnumSet<TransactionType> FEDERATED_TRANSACTION_TYPES = EnumSet.of(TransactionType.COINBASE,
            TransactionType.TRANSFER);

    private transient SemuxGui gui;
    private transient WalletModel model;

    // Value labels in the "overview" box; text is populated by refresh().
    private JLabel blockNum;
    private JLabel blockTime;
    private JLabel coinbase;
    private JLabel status;
    private JLabel available;
    private JLabel locked;
    private JLabel total;

    // Container that holds one TransactionPanel row per recent transaction.
    private JPanel transactions;
/**
 * Builds the home panel: an "overview" box (block info, coinbase, status and
 * balances) next to a "transactions" box, and registers this panel as a
 * listener on the wallet model so REFRESH events repaint it.
 *
 * @param gui the application GUI, providing the shared {@link WalletModel}
 */
public HomePanel(SemuxGui gui) {
    this.gui = gui;
    this.model = gui.getModel();
    this.model.addListener(this);

    // setup overview panel
    JPanel overview = new JPanel();
    overview.setBorder(new TitledBorder(
            new CompoundBorder(new EtchedBorder(EtchedBorder.LOWERED, null, null), new EmptyBorder(0, 10, 10, 10)),
            GuiMessages.get("Overview"), TitledBorder.LEADING, TitledBorder.TOP, null, new Color(0, 0, 0)));
    // 7 rows of label/value pairs
    overview.setLayout(new GridLayout(7, 2, 0, 0));

    JLabel labelBlockNum = new JLabel(GuiMessages.get("BlockNum") + ":");
    overview.add(labelBlockNum);

    blockNum = new JLabel("");
    overview.add(blockNum);

    JLabel lblBlockTime = new JLabel(GuiMessages.get("BlockTime") + ":");
    overview.add(lblBlockTime);

    blockTime = new JLabel("");
    overview.add(blockTime);

    JLabel labelCoinbase = new JLabel(GuiMessages.get("Coinbase") + ":");
    overview.add(labelCoinbase);

    coinbase = new JLabel("");
    overview.add(coinbase);

    JLabel labelStatus = new JLabel(GuiMessages.get("Status") + ":");
    overview.add(labelStatus);

    status = new JLabel("");
    overview.add(status);

    JLabel labelAvailable = new JLabel(GuiMessages.get("Available") + ":");
    overview.add(labelAvailable);

    available = new JLabel("");
    overview.add(available);

    JLabel labelLocked = new JLabel(GuiMessages.get("Locked") + ":");
    overview.add(labelLocked);

    locked = new JLabel("");
    overview.add(locked);

    JLabel labelTotal = new JLabel(GuiMessages.get("TotalBalance") + ":");
    overview.add(labelTotal);

    total = new JLabel("");
    overview.add(total);

    // setup transactions panel
    transactions = new JPanel();
    transactions.setBorder(new TitledBorder(
            new CompoundBorder(new EtchedBorder(EtchedBorder.LOWERED, null, null), new EmptyBorder(0, 10, 10, 10)),
            GuiMessages.get("Transactions"), TitledBorder.LEADING, TitledBorder.TOP, null, new Color(0, 0, 0)));

    // GroupLayout generated by a GUI builder; keep the structure as-is.
    // @formatter:off
    GroupLayout groupLayout = new GroupLayout(this);
    groupLayout.setHorizontalGroup(
        groupLayout.createParallelGroup(Alignment.LEADING)
            .addGroup(groupLayout.createSequentialGroup()
                .addGroup(groupLayout.createParallelGroup(Alignment.LEADING)
                    .addComponent(overview, GroupLayout.PREFERRED_SIZE, 350, GroupLayout.PREFERRED_SIZE))
                .addGap(18)
                .addComponent(transactions, GroupLayout.DEFAULT_SIZE, 412, Short.MAX_VALUE))
    );
    groupLayout.setVerticalGroup(
        groupLayout.createParallelGroup(Alignment.TRAILING)
            .addGroup(groupLayout.createSequentialGroup()
                .addGroup(groupLayout.createParallelGroup(Alignment.BASELINE)
                    .addComponent(transactions, GroupLayout.DEFAULT_SIZE, 567, Short.MAX_VALUE)
                    .addGroup(groupLayout.createSequentialGroup()
                        .addComponent(overview, GroupLayout.PREFERRED_SIZE, 199, GroupLayout.PREFERRED_SIZE)
                        .addPreferredGap(ComponentPlacement.RELATED, 353, Short.MAX_VALUE)
                        .addPreferredGap(ComponentPlacement.RELATED)))
                .addGap(0))
    );
    transactions.setLayout(new BoxLayout(transactions, BoxLayout.Y_AXIS));
    setLayout(groupLayout);
    // @formatter:on

    refresh();
}
/**
 * A single row in the transactions list: direction icon, signed amount,
 * timestamp and a description line.
 */
public static class TransactionPanel extends JPanel {

    private static final long serialVersionUID = 1L;

    /**
     * @param tx          the transaction to display
     * @param inBound     whether one of the wallet accounts is the recipient
     * @param outBound    whether one of the wallet accounts is the sender
     * @param description human-readable summary of the transaction
     */
    public TransactionPanel(Transaction tx, boolean inBound, boolean outBound, String description) {
        this.setBorder(new EmptyBorder(10, 10, 10, 10));

        JLabel lblType = new JLabel("");
        // in+out simultaneously means a transfer between own accounts ("cycle").
        String bounding = inBound ? "inbound" : "outbound";
        String name = (inBound && outBound) ? "cycle" : (bounding);
        lblType.setIcon(SwingUtil.loadImage(name, 42, 42));

        // Prefix the amount with +/-, or no sign for a self-transfer.
        String mathSign = inBound ? "+" : "-";
        String prefix = (inBound && outBound) ? "" : (mathSign);
        JLabel lblAmount = new JLabel(prefix + SwingUtil.formatAmount(tx.getValue()));
        lblAmount.setToolTipText(SwingUtil.formatAmount(tx.getValue()));
        lblAmount.setHorizontalAlignment(SwingConstants.RIGHT);

        JLabel lblTime = new JLabel(SwingUtil.formatTimestamp(tx.getTimestamp()));

        JLabel labelAddress = new JLabel(description);
        labelAddress.setForeground(Color.GRAY);

        // GroupLayout generated by a GUI builder; keep the structure as-is.
        // @formatter:off
        GroupLayout groupLayout = new GroupLayout(this);
        groupLayout.setHorizontalGroup(
            groupLayout.createParallelGroup(Alignment.LEADING)
                .addGroup(groupLayout.createSequentialGroup()
                    .addContainerGap()
                    .addComponent(lblType)
                    .addGap(18)
                    .addGroup(groupLayout.createParallelGroup(Alignment.LEADING)
                        .addGroup(groupLayout.createSequentialGroup()
                            .addComponent(lblTime, GroupLayout.PREFERRED_SIZE, 169, GroupLayout.PREFERRED_SIZE)
                            .addPreferredGap(ComponentPlacement.RELATED, 87, Short.MAX_VALUE)
                            .addComponent(lblAmount, GroupLayout.PREFERRED_SIZE, 128, GroupLayout.PREFERRED_SIZE))
                        .addGroup(groupLayout.createSequentialGroup()
                            .addComponent(labelAddress, GroupLayout.DEFAULT_SIZE, 400, Short.MAX_VALUE)
                            .addContainerGap())))
        );
        groupLayout.setVerticalGroup(
            groupLayout.createParallelGroup(Alignment.LEADING)
                .addGroup(groupLayout.createSequentialGroup()
                    .addGroup(groupLayout.createParallelGroup(Alignment.TRAILING, false)
                        .addGroup(groupLayout.createSequentialGroup()
                            .addGroup(groupLayout.createParallelGroup(Alignment.BASELINE)
                                .addComponent(lblTime, GroupLayout.PREFERRED_SIZE, 19, GroupLayout.PREFERRED_SIZE)
                                .addComponent(lblAmount, GroupLayout.PREFERRED_SIZE, 19, GroupLayout.PREFERRED_SIZE))
                            .addPreferredGap(ComponentPlacement.RELATED, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                            .addComponent(labelAddress))
                        .addComponent(lblType, Alignment.LEADING, GroupLayout.PREFERRED_SIZE, 36, GroupLayout.PREFERRED_SIZE))
                    .addContainerGap())
        );
        this.setLayout(groupLayout);
        // @formatter:on
    }
}
/**
 * Reacts to model events; the wallet model only ever dispatches
 * {@code Action.REFRESH} towards this panel.
 */
@Override
public synchronized void actionPerformed(ActionEvent e) {
    Action action = Action.valueOf(e.getActionCommand());

    if (action == Action.REFRESH) {
        refresh();
        return;
    }

    // Any other action indicates a programming error.
    throw new UnreachableException();
}
/**
 * Refreshes this panel: repopulates the overview labels from the model and
 * rebuilds the recent-transactions list across all wallet accounts.
 */
protected void refresh() {
    // Overview box: latest block, coinbase, node status and balances.
    Block block = model.getLatestBlock();
    this.blockNum.setText(SwingUtil.formatNumber(block.getNumber()));
    this.blockTime.setText(SwingUtil.formatTimestamp(block.getTimestamp()));
    this.coinbase.setText(SwingUtil.getAddressAbbr(model.getCoinbase().toAddress()));
    this.coinbase.setToolTipText(Hex.PREF + model.getCoinbase().toAddressString());
    this.status.setText(model.getStatus() == Status.VALIDATOR ? GuiMessages.get("Validator")
            : (model.getStatus() == Status.DELEGATE ? GuiMessages.get("Delegate") : GuiMessages.get("Normal")));
    this.available.setText(SwingUtil.formatAmount(model.getTotalAvailable()));
    this.available.setToolTipText(SwingUtil.formatAmount(model.getTotalAvailable()));
    this.locked.setText(SwingUtil.formatAmount(model.getTotalLocked()));
    this.locked.setToolTipText(SwingUtil.formatAmount(model.getTotalLocked()));
    this.total.setText(SwingUtil.formatAmount(sum(model.getTotalAvailable(), model.getTotalLocked())));
    this.total.setToolTipText(SwingUtil.formatAmount(sum(model.getTotalAvailable(), model.getTotalLocked())));

    // federate all transactions
    // Merge transactions across accounts, deduplicated by hash and filtered
    // to the federated types (COINBASE, TRANSFER).
    Set<ByteArray> hashes = new HashSet<>();
    List<Transaction> list = new ArrayList<>();
    for (WalletAccount acc : model.getAccounts()) {
        for (Transaction tx : acc.getTransactions()) {
            ByteArray key = ByteArray.of(tx.getHash());
            if (FEDERATED_TRANSACTION_TYPES.contains(tx.getType()) && !hashes.contains(key)) {
                list.add(tx);
                hashes.add(key);
            }
        }
    }

    // Newest first, capped at NUMBER_OF_TRANSACTIONS entries.
    list.sort((tx1, tx2) -> Long.compare(tx2.getTimestamp(), tx1.getTimestamp()));
    list = list.size() > NUMBER_OF_TRANSACTIONS ? list.subList(0, NUMBER_OF_TRANSACTIONS) : list;

    // Addresses owned by this wallet, used to classify direction per row.
    Set<ByteArray> accounts = new HashSet<>();
    for (WalletAccount a : model.getAccounts()) {
        accounts.add(ByteArray.of(a.getKey().toAddress()));
    }

    // Rebuild the transaction rows from scratch.
    transactions.removeAll();
    for (Transaction tx : list) {
        boolean inBound = accounts.contains(ByteArray.of(tx.getTo()));
        boolean outBound = accounts.contains(ByteArray.of(tx.getFrom()));
        transactions.add(new TransactionPanel(tx, inBound, outBound, SwingUtil.getTransactionDescription(gui, tx)));
    }
    transactions.revalidate();
}
}
<file_sep># IntelliJ IDEA Setup Guide
This guide helps you set up the IntelliJ IDEA environment.
### Import the project
1. Clone the semux project via `git clone https://github.com/semuxproject/semux`;
2. Open IntelliJ IDEA and import it as a Maven project.
### Set up code style
1. Download the [Eclipse formatter XML](https://raw.githubusercontent.com/semuxproject/semux/master/misc/formatter_eclipse.xml);
2. Download the [IntelliJ formatter XML](https://raw.githubusercontent.com/semuxproject/semux/master/misc/formatter_intellij.xml);
3. Go to `Preferences` > `Code Style` > `Java`, click the `gear icon` and import the downloaded schemas;
4. Now your workspace is ready!
<file_sep># Contributing to Semux
Anyone is welcome to contribute towards development in the form of peer review, testing and patches. This document explains the practical process and guidelines for contributing.
## Code conventions
This project follows the [Google Java Style Guide](https://google.github.io/styleguide/javaguide.html) with additional requirements.
```
indent: no tab, 4 spaces instead
line limit: 120 chars
```
To format your code, please run the following command:
```
mvn formatter:format license:format
```
To check potential code-style and security issues, run:
```
mvn findbugs:check
```
For IDE users, [Eclipse Setup Guide](https://github.com/semuxproject/semux/blob/master/misc/eclipse/guide.md) and [IntelliJ IDEA Setup Guide](https://github.com/semuxproject/semux/blob/master/misc/intellij/guide.md) are also provided.
## Contributor workflow
To contribute a patch, the workflow is as follows:
1. Fork repository
2. Create topic branch
3. Commit patches
In general [commits should be atomic](https://en.wikipedia.org/wiki/Atomic_commit#Atomic_commit_convention) and diffs should be easy to read. For this reason do not mix any formatting fixes or code moves with actual code changes.
Commit messages should be verbose by default consisting of a short subject line (**imperative present tense, max 50 characters, prefixed by component**), a blank line and detailed explanatory text as separate paragraph(s), unless the title alone is self-explanatory in which case a single title line is sufficient. Commit messages should be helpful to people reading your code in the future, so explain the reasoning for your decisions. Further explanation [here](https://github.com/agis/git-style-guide). Example:
```
Component: short summary of changes
More detailed explanatory text, if necessary. In some contexts, the first
line is treated as the subject of an email and the rest of the text as the body.
The blank line separating the summary from the body is critical.
Further paragraphs come after blank lines.
Resolves: #56, #78
See also: #12, #34
```
The title of the pull request should be prefixed by the component or area that the pull request affects. Valid areas are:
- **API** for changes to the RESTful API code
- **CLI** for changes to the wallet CLI code
- **Config** for changes to configurations
- **Consensus** for changes to the consensus code
- **Core** for changes to the core data structures and algorithms
- **Crypto** for changes to the crypto code
- **DB** for changes to the database code
- **GUI** for changes to the wallet GUI code
- **Net** OR **P2P** for changes to the peer-to-peer network code
- **Util** for changes to the utils and libraries
- **VM** for changes to the virtual machine code
- **Docs** for changes to the docs
- **Tests** for changes to the unit test and QA tests
- **Tools** for changes to the scripts and tools
- **Trivial** should **only** be used for PRs that do not change generated executable code:
- comments
- whitespace
- variable names
- logging and messages
Examples:
```
Consensus: adjust the BFT timeout parameters
P2P: increase the max allowed connections
Trivial: fix typo in Semux.java
```
If a pull request is not to be considered for merging (yet), please prefix the title with [WIP] or use [Tasks Lists](https://help.github.com/articles/basic-writing-and-formatting-syntax/#task-lists) in the body of the pull request to indicate tasks are pending.
The body of the pull request should contain enough description about what the patch does together with any justification/reasoning. You should include references to any discussions (for example other tickets or mailing list discussions).
At this stage one should expect comments and review from other contributors. You can add more commits to your pull request by committing them locally and pushing to your fork until you have satisfied all feedback.
## Squashing commits
If your pull request is accepted for merging, you may be asked by a maintainer to squash and or [rebase](https://git-scm.com/docs/git-rebase) your commits before it will be merged. The basic squashing workflow is shown below.
```
git checkout your_branch_name
git rebase -i HEAD~n
# n is normally the number of commits in the pull
# set commits from 'pick' to 'squash', save and quit
# on the next screen, edit/refine commit messages
# save and quit
git push -f # (force push to GitHub)
```
## Pull request philosophy
Patchsets should always be focused. For example, a pull request could add a feature, fix a bug, or refactor code; but not a mixture. Please also avoid super pull requests which attempt to do too much, are overly large, or overly complex as this makes review difficult.
## Maintainer
For maintainer, make sure the PR you're going to merge passes all the checks and complies with this contributing guide. It's also advised to update the merge commit message to the following format:
```
Merge #123: Component: pull request title
Optional clarification or descriptions
```
## Copyright
By contributing to this repository, you agree to license your work under the MIT license. Any work contributed where you are not the original author must contain its license header with the original author(s) and source.
```
/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
```
<file_sep>FROM openjdk:8-jre-alpine
WORKDIR /

# Download and unpack the latest Linux release in a single layer, so the
# curl/jq toolchain never persists in the final image. (The original used a
# separate `RUN apk del curl jq`, which does NOT shrink the image: the
# packages remain in the earlier layer.)
RUN apk --no-cache add curl jq && \
    LATEST=`curl -s https://api.github.com/repos/semuxproject/semux/releases/latest | jq '.assets[] | select(.name | contains("linux"))'` && \
    LINK=`echo ${LATEST} | jq -r '.browser_download_url'` && \
    TARBALL=`echo ${LATEST} | jq -r '.name'` && \
    curl -Lo ${TARBALL} ${LINK} && \
    mkdir -p /semux && \
    tar -xzf ${TARBALL} -C /semux --strip-components=1 && \
    rm ${TARBALL} && \
    apk del curl jq

EXPOSE 5161
ENTRYPOINT ["/semux/semux-cli.sh"]
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.config;
import static org.junit.Assert.assertEquals;
import org.junit.Test;
import org.semux.Network;
/** Verifies that a testnet configuration reports the TESTNET network id. */
public class TestnetConfigTest {

    @Test
    public void testNetworkId() {
        Config cfg = new TestnetConfig(Constants.DEFAULT_DATA_DIR);
        assertEquals(Network.TESTNET, cfg.network());
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.gui.model;
import static org.semux.core.Amount.ZERO;
import java.awt.EventQueue;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
import org.semux.core.Amount;
import org.semux.core.Block;
import org.semux.core.SyncManager;
import org.semux.crypto.Key;
import org.semux.gui.Action;
import org.semux.gui.SemuxEvent;
import org.semux.gui.SemuxEventListener;
import org.semux.net.Peer;
import org.semux.util.ByteArray;
/**
* A Model stores all the data that GUI needs. The thread-safety of this class
* is achieved by swapping pointers instead of synchronization.
*/
/**
 * A Model stores all the data that GUI needs. The thread-safety of this class
 * is achieved by swapping pointers instead of synchronization.
 */
public class WalletModel {

    // Listener lists are CopyOnWriteArrayList so registration may race with firing.
    private List<ActionListener> listeners = new CopyOnWriteArrayList<>();
    private List<ActionListener> lockableComponents = new CopyOnWriteArrayList<>();
    private List<SemuxEventListener> semuxEventListeners = new CopyOnWriteArrayList<>();

    // NOTE(review): the class doc claims thread-safety via pointer swapping, but
    // only accountsIndex/accounts/delegates are volatile; syncProgress,
    // latestBlock, coinbase, status and activePeers are plain fields — confirm
    // whether they are only touched from the EDT.
    private SyncManager.Progress syncProgress;
    private Block latestBlock;
    private Key coinbase;
    private Status status;

    // accountsIndex maps account address -> position in accounts.
    private volatile Map<ByteArray, Integer> accountsIndex = new HashMap<>();
    private volatile List<WalletAccount> accounts = new ArrayList<>();
    private volatile List<WalletDelegate> delegates = new ArrayList<>();
    private Map<String, Peer> activePeers = new HashMap<>();

    /**
     * Fires a model update event, asking all registered views to refresh.
     */
    public void fireUpdateEvent() {
        updateView();
    }

    /**
     * Fires a lock event towards all lockable components.
     */
    public void fireLockEvent() {
        lockView();
    }

    /**
     * Fires a Semux event; each listener is notified on the AWT event queue.
     *
     * @param event
     */
    public void fireSemuxEvent(SemuxEvent event) {
        semuxEventListeners.forEach(listener -> EventQueue.invokeLater(() -> listener.onSemuxEvent(event)));
    }

    /**
     * Add a listener.
     *
     * @param listener
     */
    public void addListener(ActionListener listener) {
        listeners.add(listener);
    }

    /**
     * Add a Semux event listener.
     *
     * @param listener
     */
    public void addSemuxEventListener(SemuxEventListener listener) {
        semuxEventListeners.add(listener);
    }

    /**
     * Remove a Semux event listener.
     *
     * @param listener
     */
    public void removeSemuxEventListener(SemuxEventListener listener) {
        semuxEventListeners.remove(listener);
    }

    /**
     * Add a component for locking.<br />
     * This component has to provide Action.LOCK as ActionListener Event
     *
     * @param listener
     */
    public void addLockable(ActionListener listener) {
        lockableComponents.add(listener);
    }

    /**
     * Getter for property {@link #syncProgress}.
     *
     * @return the current sync progress snapshot
     */
    public SyncManager.Progress getSyncProgress() {
        return syncProgress;
    }

    /**
     * Setter for property {@link #syncProgress}.
     *
     * @param syncProgress
     *            Value to set for property {@link #syncProgress}.
     */
    public void setSyncProgress(SyncManager.Progress syncProgress) {
        this.syncProgress = syncProgress;
    }

    /**
     * Get the latest block.
     *
     * @return
     */
    public Block getLatestBlock() {
        return latestBlock;
    }

    /**
     * Set the latest block.
     *
     * @param latestBlock
     */
    public void setLatestBlock(Block latestBlock) {
        this.latestBlock = latestBlock;
    }

    /**
     * Get the coinbase.
     *
     * @return
     */
    public Key getCoinbase() {
        return coinbase;
    }

    /**
     * Set the coinbase.
     *
     * @param coinbase
     */
    public void setCoinbase(Key coinbase) {
        this.coinbase = coinbase;
    }

    /**
     * Returns the account status.
     *
     * @return
     */
    public Status getStatus() {
        return status;
    }

    /**
     * Sets the account status.
     *
     * @param status
     */
    public void setStatus(Status status) {
        this.status = status;
    }

    /**
     * Get the total available, summed over all wallet accounts.
     *
     * @return
     */
    public Amount getTotalAvailable() {
        return accounts.stream().map(it -> it.getAvailable()).reduce(ZERO, Amount::sum);
    }

    /**
     * Get the total locked, summed over all wallet accounts.
     *
     * @return
     */
    public Amount getTotalLocked() {
        return accounts.stream().map(it -> it.getLocked()).reduce(ZERO, Amount::sum);
    }

    public List<WalletAccount> getAccounts() {
        return accounts;
    }

    /**
     * Returns the index of the account with the given address, or -1 if the
     * address does not belong to this wallet.
     */
    public int getAccountNumber(byte[] address) {
        Integer n = accountsIndex.get(ByteArray.of(address));
        return n == null ? -1 : n;
    }

    /**
     * Returns the account with the given address, or null if not found.
     */
    public WalletAccount getAccount(byte[] address) {
        int accountNum = getAccountNumber(address);
        return accountNum >= 0 ? accounts.get(accountNum) : null;
    }

    /**
     * Replaces the account list; the address index is rebuilt first and both
     * references are swapped in (index before list).
     */
    public void setAccounts(List<WalletAccount> accounts) {
        Map<ByteArray, Integer> map = new HashMap<>();
        for (int i = 0; i < accounts.size(); i++) {
            map.put(ByteArray.of(accounts.get(i).getKey().toAddress()), i);
        }

        this.accounts = accounts;
        this.accountsIndex = map;
    }

    public List<WalletDelegate> getDelegates() {
        return delegates;
    }

    public void setDelegates(List<WalletDelegate> delegates) {
        this.delegates = delegates;
    }

    public Map<String, Peer> getActivePeers() {
        return activePeers;
    }

    public void setActivePeers(Map<String, Peer> activePeers) {
        this.activePeers = activePeers;
    }

    /**
     * Updates MVC view: every listener receives a REFRESH action on the AWT
     * event queue.
     */
    protected void updateView() {
        for (ActionListener listener : listeners) {
            EventQueue.invokeLater(() -> listener.actionPerformed(new ActionEvent(this, 0, Action.REFRESH.name())));
        }
    }

    /**
     * Locks components: every lockable receives a LOCK action on the AWT event
     * queue.
     */
    protected void lockView() {
        for (ActionListener listener : lockableComponents) {
            EventQueue.invokeLater(() -> listener.actionPerformed(new ActionEvent(this, 0, Action.LOCK.name())));
        }
    }

    // Node role of the coinbase account, as displayed in the GUI.
    public enum Status {
        NORMAL, DELEGATE, VALIDATOR
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.consensus;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import org.semux.crypto.Key;
import org.semux.util.Bytes;
/**
 * Tests {@code VoteSet}: vote admission rules (validator membership, matching
 * height/view), clearing, batch addition, and the two-thirds approval
 * threshold.
 */
public class VoteSetTest {

    // Fixture: a vote set at (height=1, view=1) with four validators.
    private long height = 1;
    private int view = 1;

    private Key v1 = new Key();
    private Key v2 = new Key();
    private Key v3 = new Key();
    private Key v4 = new Key();

    private VoteSet vs = null;

    @Before
    public void setUp() {
        List<String> list = new ArrayList<>();
        list.add(v1.toAddressString());
        list.add(v2.toAddressString());
        list.add(v3.toAddressString());
        list.add(v4.toAddressString());
        vs = new VoteSet(VoteType.VALIDATE, height, view, list);
    }

    @Test
    public void testAddVote() {
        // Unsigned vote: rejected.
        Vote vote = Vote.newApprove(VoteType.VALIDATE, height, view, Bytes.EMPTY_HASH);
        assertFalse(vs.addVote(vote));
        // Signed by a non-validator key: rejected.
        vote.sign(new Key());
        assertFalse(vs.addVote(vote));
        // Signed by a validator: accepted.
        vote.sign(v1);
        assertTrue(vs.addVote(vote));

        // Wrong height: rejected even with a valid signer.
        vote = Vote.newApprove(VoteType.VALIDATE, height + 1, view, Bytes.EMPTY_HASH);
        vote.sign(v1);
        assertFalse(vs.addVote(vote));

        // Wrong view: rejected even with a valid signer.
        vote = Vote.newApprove(VoteType.VALIDATE, height, view + 1, Bytes.EMPTY_HASH);
        vote.sign(v1);
        assertFalse(vs.addVote(vote));
    }

    @Test
    public void testClear() {
        Vote vote = Vote.newApprove(VoteType.VALIDATE, height, view, Bytes.EMPTY_HASH);
        vote.sign(v1);
        assertTrue(vs.addVote(vote));
        Vote vote2 = Vote.newReject(VoteType.VALIDATE, height, view);
        vote2.sign(v1);
        assertTrue(vs.addVote(vote2));

        assertEquals(1, vs.getApprovals(Bytes.EMPTY_HASH).size());
        assertEquals(1, vs.getRejections().size());

        // clear() must drop both approvals and rejections.
        vs.clear();
        assertEquals(0, vs.getApprovals(Bytes.EMPTY_HASH).size());
        assertEquals(0, vs.getRejections().size());
    }

    @Test
    public void testAddVotes() {
        Vote vote = Vote.newApprove(VoteType.VALIDATE, height, view, Bytes.EMPTY_HASH);
        vote.sign(v1);
        Vote vote2 = Vote.newReject(VoteType.VALIDATE, height, view);
        vote2.sign(v1);

        // Batch addition reports how many votes were accepted.
        assertEquals(2, vs.addVotes(Arrays.asList(vote, vote2)));
        assertEquals(2, vs.size());
        assertEquals(1, vs.getApprovals(Bytes.EMPTY_HASH).size());
        assertEquals(1, vs.getRejections().size());
    }

    @Test
    public void testTwoThirds() {
        // With 4 validators, 3 approvals (> 2/3) are required; re-signing the
        // same Vote object switches the attributed validator before each add.
        Vote vote = Vote.newApprove(VoteType.VALIDATE, height, view, Bytes.EMPTY_HASH);
        vote.sign(v1);
        assertTrue(vs.addVote(vote));
        assertFalse(vs.anyApproved().isPresent());
        vote.sign(v2);
        assertTrue(vs.addVote(vote));
        assertFalse(vs.anyApproved().isPresent());
        vote.sign(v3);
        assertTrue(vs.addVote(vote));
        assertTrue(vs.anyApproved().isPresent());
        assertTrue(vs.isApproved(Bytes.EMPTY_HASH));
    }

    @Test
    public void testToString() {
        // Guards against the default Object#toString implementation.
        assertTrue(!vs.toString().startsWith("java.lang.Object"));
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.consensus;
import java.util.ArrayList;
import java.util.List;
import org.semux.Network;
import org.semux.core.Block;
import org.semux.core.BlockHeader;
import org.semux.core.Transaction;
import org.semux.crypto.Key;
import org.semux.crypto.Key.Signature;
import org.semux.util.SimpleDecoder;
import org.semux.util.SimpleEncoder;
/**
 * A consensus proposal: the proof justifying the proposer's right to propose
 * at a given (height, view), the proposed block header, and the block's
 * transactions. A proposal is signed over its canonical binary encoding.
 */
public class Proposal {

    private Proof proof;
    private BlockHeader blockHeader;
    private List<Transaction> transactions;

    // canonical binary encoding of (proof, header, transactions);
    // this is the exact byte sequence that gets signed
    private byte[] encoded;
    private Signature signature;

    /**
     * Creates an unsigned proposal and pre-computes its binary encoding.
     *
     * @param proof
     *            proof for the (height, view) this proposal targets
     * @param blockHeader
     *            header of the proposed block
     * @param transactions
     *            transactions contained in the proposed block
     */
    public Proposal(Proof proof, BlockHeader blockHeader, List<Transaction> transactions) {
        this.proof = proof;
        this.blockHeader = blockHeader;
        this.transactions = transactions;
        // encoding order: proof, header, transaction count, each transaction;
        // the decoding constructor below must read in exactly the same order
        SimpleEncoder enc = new SimpleEncoder();
        enc.writeBytes(proof.toBytes());
        enc.writeBytes(blockHeader.toBytes());
        enc.writeInt(transactions.size());
        for (Transaction tx : transactions) {
            enc.writeBytes(tx.toBytes());
        }
        this.encoded = enc.toBytes();
    }

    /**
     * Restores a signed proposal from its encoding and signature bytes.
     *
     * @param encoded
     *            canonical encoding as produced by the other constructor
     * @param signature
     *            signature over {@code encoded}
     */
    public Proposal(byte[] encoded, byte[] signature) {
        SimpleDecoder dec = new SimpleDecoder(encoded);
        this.proof = Proof.fromBytes(dec.readBytes());
        this.blockHeader = BlockHeader.fromBytes(dec.readBytes());
        this.transactions = new ArrayList<>();
        int n = dec.readInt();
        for (int i = 0; i < n; i++) {
            transactions.add(Transaction.fromBytes(dec.readBytes()));
        }
        this.encoded = encoded;
        this.signature = Signature.fromBytes(signature);
    }

    /**
     * Sign this proposal.
     *
     * @param key
     *            the signing key
     * @return this proposal, for chaining
     */
    public Proposal sign(Key key) {
        this.signature = key.sign(encoded);
        return this;
    }

    /**
     * <p>
     * Validate proposal format and signature.
     * </p>
     *
     * <p>
     * NOTE: this method will NOT validate the proposed block, nor the proof, nor
     * the transactions inside the block. Use
     * {@link Block#validateHeader(BlockHeader, BlockHeader)} and
     * {@link Block#validateTransactions(BlockHeader, List, Network)} for that
     * purpose.
     * </p>
     *
     * @return true if success, otherwise false
     */
    public boolean validate() {
        return getHeight() > 0
                && getView() >= 0
                && proof != null
                && blockHeader != null
                && transactions != null
                && proof.getHeight() == blockHeader.getNumber()
                && encoded != null
                && signature != null && Key.verify(encoded, signature);
    }

    public Proof getProof() {
        return proof;
    }

    /** Returns the height this proposal targets (taken from the proof). */
    public long getHeight() {
        return proof.getHeight();
    }

    /** Returns the view this proposal targets (taken from the proof). */
    public int getView() {
        return proof.getView();
    }

    public BlockHeader getBlockHeader() {
        return blockHeader;
    }

    public List<Transaction> getTransactions() {
        return transactions;
    }

    public Signature getSignature() {
        return signature;
    }

    /** Serializes this proposal as [encoded, signature]. */
    public byte[] toBytes() {
        SimpleEncoder enc = new SimpleEncoder();
        enc.writeBytes(encoded);
        enc.writeBytes(signature.toBytes());
        return enc.toBytes();
    }

    /** Parses a proposal produced by {@link #toBytes()}. */
    public static Proposal fromBytes(byte[] bytes) {
        SimpleDecoder dec = new SimpleDecoder(bytes);
        byte[] encoded = dec.readBytes();
        byte[] signature = dec.readBytes();
        return new Proposal(encoded, signature);
    }

    @Override
    public String toString() {
        return "Proposal [height=" + getHeight() + ", view = " + getView() + ", # proof votes = "
                + proof.getVotes().size() + ", # txs = " + transactions.size() + "]";
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.rules;
import static org.powermock.api.mockito.PowerMockito.mock;
import static org.powermock.api.mockito.PowerMockito.spy;
import static org.powermock.api.mockito.PowerMockito.when;
import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import org.junit.rules.TemporaryFolder;
import org.semux.KernelMock;
import org.semux.config.Config;
import org.semux.config.DevnetConfig;
import org.semux.core.Block;
import org.semux.core.BlockHeader;
import org.semux.core.BlockchainImpl;
import org.semux.core.PendingManager;
import org.semux.core.Transaction;
import org.semux.core.TransactionResult;
import org.semux.core.Wallet;
import org.semux.crypto.Hex;
import org.semux.crypto.Key;
import org.semux.db.LeveldbDatabase.LevelDbFactory;
import org.semux.util.Bytes;
import org.semux.util.MerkleUtil;
/**
* A kernel rule creates a temporary folder as the data directory. Ten accounts
* will be created automatically and the first one will be used as coinbase.
*/
public class KernelRule extends TemporaryFolder {

    private int p2pPort;
    private int apiPort;

    // randomly generated wallet password (hex-encoded), set in before()
    private String password;

    private KernelMock kernel;
    private LevelDbFactory dbFactory;

    /**
     * Creates the rule with the ports the mocked kernel should listen on.
     *
     * @param p2pPort
     *            P2P listen port
     * @param apiPort
     *            API listen port
     */
    public KernelRule(int p2pPort, int apiPort) {
        super();
        this.p2pPort = p2pPort;
        this.apiPort = apiPort;
    }

    @Override
    protected void before() throws Throwable {
        create();
        // generate random password
        this.password = Hex.encode(Bytes.random(12));
        // generate kernel mock: a wallet with ten fresh accounts is created
        // in the temporary folder; account 0 becomes the coinbase
        Config config = mockConfig(p2pPort, apiPort);
        Wallet wallet = new Wallet(new File(getRoot(), "wallet.data"));
        wallet.unlock(password);
        for (int i = 0; i < 10; i++) {
            wallet.addAccount(new Key());
        }
        wallet.flush();
        Key coinbase = wallet.getAccount(0);
        this.kernel = new KernelMock(config, wallet, coinbase);
        // the pending manager is a plain mock; tests that need real behavior
        // are expected to replace it
        this.kernel.setPendingManager(mock(PendingManager.class));
    }

    @Override
    protected void after() {
        // stop the kernel before deleting its data directory
        kernel.stop();
        delete();
    }

    /**
     * Builds a spied devnet {@link Config} bound to localhost, with the given
     * ports and API authentication enabled.
     */
    protected Config mockConfig(int p2pPort, int apiPort) {
        Config config = spy(new DevnetConfig(getRoot().getAbsolutePath()));
        when(config.p2pDeclaredIp()).thenReturn(Optional.of("127.0.0.1"));
        when(config.p2pListenIp()).thenReturn("127.0.0.1");
        when(config.p2pListenPort()).thenReturn(p2pPort);
        when(config.apiListenIp()).thenReturn("127.0.0.1");
        when(config.apiListenPort()).thenReturn(apiPort);
        when(config.apiEnabled()).thenReturn(true);
        when(config.apiUsername()).thenReturn("username");
        when(config.apiPassword()).thenReturn("<PASSWORD>");
        return config;
    }

    /**
     * Returns the password.
     *
     * @return the randomly generated wallet password
     */
    public String getPassword() {
        return password;
    }

    /**
     * Returns the kernel.
     *
     * @return the mocked kernel created by this rule
     */
    public KernelMock getKernel() {
        return kernel;
    }

    /**
     * Speed up the consensus.
     */
    public void speedUpConsensus() throws IOException {
        Config config = kernel.getConfig();
        // speed up consensus: shorten every BFT timeout to one second
        when(config.bftNewHeightTimeout()).thenReturn(1000L);
        when(config.bftProposeTimeout()).thenReturn(1000L);
        when(config.bftValidateTimeout()).thenReturn(1000L);
        when(config.bftPreCommitTimeout()).thenReturn(1000L);
        when(config.bftCommitTimeout()).thenReturn(1000L);
        when(config.bftFinalizeTimeout()).thenReturn(1000L);
    }

    /**
     * Opens the database.
     */
    public void openBlockchain() {
        dbFactory = new LevelDbFactory(kernel.getConfig().databaseDir());
        BlockchainImpl chain = new BlockchainImpl(kernel.getConfig(), dbFactory);
        kernel.setBlockchain(chain);
    }

    /**
     * Closes the database.
     */
    public void closeBlockchain() {
        dbFactory.close();
    }

    /**
     * Helper method to create a testing block.
     *
     * @param txs
     *            list of transaction
     * @return created block
     */
    public Block createBlock(List<Transaction> txs) {
        // every transaction is marked successful
        List<TransactionResult> res = txs.stream().map(tx -> new TransactionResult(true)).collect(Collectors.toList());
        // chain the block onto the current latest block, with a fresh
        // coinbase key and the current wall-clock time
        long number = getKernel().getBlockchain().getLatestBlock().getNumber() + 1;
        Key key = new Key();
        byte[] coinbase = key.toAddress();
        byte[] prevHash = getKernel().getBlockchain().getLatestBlock().getHash();
        long timestamp = System.currentTimeMillis();
        byte[] transactionsRoot = MerkleUtil.computeTransactionsRoot(txs);
        byte[] resultsRoot = MerkleUtil.computeResultsRoot(res);
        byte[] stateRoot = Bytes.EMPTY_HASH;
        byte[] data = {};
        BlockHeader header = new BlockHeader(
                number,
                coinbase,
                prevHash,
                timestamp,
                transactionsRoot,
                resultsRoot,
                stateRoot,
                data);
        return new Block(header, txs, res);
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.bench;
import java.io.IOException;
import java.net.InetSocketAddress;
import org.junit.Rule;
import org.junit.Test;
import org.semux.api.SemuxApiMock;
import org.semux.config.Config;
import org.semux.rules.KernelRule;
import org.semux.util.ApiClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* TODO: investigate, significant performance decrease noticed.
*/
public class ApiPerformance {

    private static final Logger logger = LoggerFactory.getLogger(ApiPerformance.class);

    @Rule
    public KernelRule kernelRule = new KernelRule(51610, 51710);

    /**
     * Measures the average wall-clock latency of a {@code get_info} API
     * request over many iterations and logs the result in microseconds.
     */
    @Test
    public void testBasic() throws IOException {
        SemuxApiMock apiMock = new SemuxApiMock(kernelRule.getKernel());
        apiMock.start();
        try {
            final int iterations = 1000;
            Config config = apiMock.getKernel().getConfig();

            long startedAt = System.nanoTime();
            for (int i = 0; i < iterations; i++) {
                // the client is re-created each iteration, so its
                // construction cost is included in the measurement
                ApiClient client = new ApiClient(new InetSocketAddress(config.apiListenIp(), config.apiListenPort()),
                        config.apiUsername(),
                        config.apiPassword());
                client.request("get_info");
            }
            long finishedAt = System.nanoTime();

            logger.info("Perf_api_basic: " + (finishedAt - startedAt) / 1_000 / iterations + " μs/time");
        } finally {
            apiMock.stop();
        }
    }
}
<file_sep>#!/bin/sh
# change work directory
# (resolve relative to this script's own location, so semux.jar is found
# no matter where the script is invoked from)
cd "$(dirname "$0")"

# start kernel
# launch through the wrapper class in GUI mode, forwarding all
# command-line arguments; no extra JVM options are passed
java -cp semux.jar org.semux.wrapper.Wrapper \
    --jvmoptions "" \
    --gui "$@"
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.net.msg.p2p;
import org.semux.net.msg.Message;
import org.semux.net.msg.MessageCode;
import org.semux.net.msg.ReasonCode;
import org.semux.util.Bytes;
public class DisconnectMessage extends Message {

    private ReasonCode reason;

    /**
     * Creates a DISCONNECT message carrying the given reason code.
     *
     * @param reason
     *            why the peer is being disconnected
     */
    public DisconnectMessage(ReasonCode reason) {
        super(MessageCode.DISCONNECT, null);
        this.encoded = Bytes.of(reason.toByte());
        this.reason = reason;
    }

    /**
     * Decodes a DISCONNECT message from its single-byte wire encoding.
     *
     * @param encoded
     *            the raw message body
     */
    public DisconnectMessage(byte[] encoded) {
        super(MessageCode.DISCONNECT, null);
        this.encoded = encoded;
        this.reason = ReasonCode.of(Bytes.toByte(encoded));
    }

    /** Returns the disconnect reason carried by this message. */
    public ReasonCode getReason() {
        return reason;
    }

    @Override
    public String toString() {
        return "DisconnectMessage [reason=" + reason + "]";
    }
}<file_sep># Eclipse Setup Guide
This guide helps you set up the Eclipse environment.
### Import the project
1. Clone the semux project via `git clone https://github.com/semuxproject/semux`;
2. Open Eclipse and import it as `Existing Maven Project`.
### Set up code style
1. Download the [Eclipse formatter XML](https://raw.githubusercontent.com/semuxproject/semux/master/misc/formatter_eclipse.xml);
2. Go to `Preferences` > `Java` > `Code Style` > `Formatter`, and import the downloaded schema;
3. Now your workspace is ready!
<file_sep>#!/bin/bash
#
# Copyright (c) 2015 the authors of j2objc-gradle (see AUTHORS file)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# exit on error (-e), fail on unset variables (-u), echo each line (-v)
set -euv

# What Java versions do we have?
/usr/libexec/java_home -V

# Prep brew itself
brew update
brew outdated caskroom/cask/brew-cask || brew upgrade caskroom/cask/brew-cask

# We must be able to get older Java versions than the latest.
brew tap caskroom/versions
# wipe any preinstalled JDKs so the cask-installed JDK 8 is the only one left
sudo rm -rf /Library/Java/JavaVirtualMachines
brew cask install caskroom/versions/java8

# Fail unless we installed JDK 8 correctly.
/usr/libexec/java_home --failfast --version 1.8
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.cli;
/**
* Semux launcher options.
*/
public enum SemuxOption {

    HELP("help"),

    VERSION("version"),

    ACCOUNT("account"),

    CHANGE_PASSWORD("<PASSWORD>"),

    DATA_DIR("datadir"),

    COINBASE("coinbase"),

    PASSWORD("<PASSWORD>"),

    DUMP_PRIVATE_KEY("dumpprivatekey"),

    IMPORT_PRIVATE_KEY("importprivatekey"),

    NETWORK("network");

    // the option's name as used on the command line
    private final String label;

    SemuxOption(String label) {
        this.label = label;
    }

    @Override
    public String toString() {
        return label;
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.bench;
import org.semux.crypto.Hash;
import org.semux.crypto.Key;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Micro-benchmarks for the crypto primitives: 256-bit hashing, 160-bit
 * hashing, signing, and signature verification. Each result is logged in
 * microseconds per operation.
 */
public class CryptoPerformance {

    private static final Logger logger = LoggerFactory.getLogger(CryptoPerformance.class);

    // payload sizes to benchmark: 1 KB and 1 MB
    private static int[] DATA_SIZES = { 1024, 1024 * 1024 };
    private static int REPEAT = 1000;

    /** Benchmarks the 256-bit hash over each payload size. */
    public static void testH256() {
        for (int size : DATA_SIZES) {
            byte[] payload = new byte[size];
            long begin = System.nanoTime();
            for (int round = 0; round < REPEAT; round++) {
                Hash.h256(payload);
            }
            long end = System.nanoTime();
            logger.info("Perf_h256_{}k: {} μs/time", size / 1024, (end - begin) / 1_000 / REPEAT);
        }
    }

    /** Benchmarks the 160-bit hash over each payload size. */
    public static void testH160() {
        for (int size : DATA_SIZES) {
            byte[] payload = new byte[size];
            long begin = System.nanoTime();
            for (int round = 0; round < REPEAT; round++) {
                Hash.h160(payload);
            }
            long end = System.nanoTime();
            logger.info("Perf_h160_{}k: {} μs/time", size / 1024, (end - begin) / 1_000 / REPEAT);
        }
    }

    /** Benchmarks signing the 256-bit digest of each payload size. */
    public static void testSign() {
        for (int size : DATA_SIZES) {
            Key signer = new Key();
            byte[] payload = new byte[size];
            byte[] digest = Hash.h256(payload);
            long begin = System.nanoTime();
            for (int round = 0; round < REPEAT; round++) {
                signer.sign(digest);
            }
            long end = System.nanoTime();
            logger.info("Perf_sign_{}k: {} μs/time", size / 1024, (end - begin) / 1_000 / REPEAT);
        }
    }

    /** Benchmarks verifying one signature per iteration. */
    public static void testVerify() {
        for (int size : DATA_SIZES) {
            Key signer = new Key();
            byte[] payload = new byte[size];
            byte[] digest = Hash.h256(payload);
            byte[] signature = signer.sign(digest).toBytes();
            long begin = System.nanoTime();
            for (int round = 0; round < REPEAT; round++) {
                Key.verify(digest, signature);
            }
            long end = System.nanoTime();
            logger.info("Perf_verify_{}k: {} μs/time", size / 1024, (end - begin) / 1_000 / REPEAT);
        }
    }

    /** Runs all four benchmarks in sequence. */
    public static void main(String[] args) throws Exception {
        testH256();
        testH160();
        testSign();
        testVerify();
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.core.state;
import static org.semux.core.Amount.sum;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import org.semux.core.Amount;
import org.semux.db.Database;
import org.semux.util.ByteArray;
import org.semux.util.Bytes;
/**
* Account state implementation.
*
* <pre>
* account DB structure:
*
* [0, address] => [account_object]
* [1, address] => [code]
* [2, address, storage_key] = [storage_value]
* </pre>
*/
public class AccountStateImpl implements AccountState {

    // key-prefix bytes for the three entry types in the account DB
    protected static final byte TYPE_ACCOUNT = 0;
    protected static final byte TYPE_CODE = 1;
    protected static final byte TYPE_STORAGE = 2;

    // the backing database; null for tracked (child) states
    protected Database accountDB;
    // the parent state this instance is layered on; null for the root state
    protected AccountStateImpl prev;

    /**
     * All updates, or deletes if the value is null.
     *
     * NOTE(review): ConcurrentHashMap rejects null values, so a null "delete"
     * marker can never actually be stored here; the null checks below are
     * defensive only — confirm whether deletion support is required.
     */
    protected final Map<ByteArray, byte[]> updates = new ConcurrentHashMap<>();

    /**
     * Create an {@link AccountState} that work directly on a database.
     *
     * @param accountDB
     *            the backing account database
     */
    public AccountStateImpl(Database accountDB) {
        this.accountDB = accountDB;
    }

    /**
     * Create an {@link AccountState} based on a previous AccountState.
     *
     * @param prev
     *            the parent state to layer on top of
     */
    public AccountStateImpl(AccountStateImpl prev) {
        this.prev = prev;
    }

    /**
     * Returns the account at the given address, consulting the local update
     * buffer first, then the parent state, then the database. A missing
     * account is materialized as a fresh zero-balance, zero-nonce account.
     */
    @Override
    public Account getAccount(byte[] address) {
        ByteArray k = getKey(TYPE_ACCOUNT, address);
        Amount noAmount = Amount.ZERO;
        if (updates.containsKey(k)) {
            byte[] v = updates.get(k);
            return v == null ? new Account(address, noAmount, noAmount, 0) : Account.fromBytes(address, v);
        } else if (prev != null) {
            return prev.getAccount(address);
        } else {
            byte[] v = accountDB.get(k.getData());
            return v == null ? new Account(address, noAmount, noAmount, 0) : Account.fromBytes(address, v);
        }
    }

    /** Increments the account's nonce by one (buffered until commit). */
    @Override
    public void increaseNonce(byte[] address) {
        ByteArray k = getKey(TYPE_ACCOUNT, address);
        Account acc = getAccount(address);
        acc.setNonce(acc.getNonce() + 1);
        updates.put(k, acc.toBytes());
    }

    /** Adds {@code delta} to the account's available balance (buffered). */
    @Override
    public void adjustAvailable(byte[] address, Amount delta) {
        ByteArray k = getKey(TYPE_ACCOUNT, address);
        Account acc = getAccount(address);
        acc.setAvailable(sum(acc.getAvailable(), delta));
        updates.put(k, acc.toBytes());
    }

    /** Adds {@code delta} to the account's locked balance (buffered). */
    @Override
    public void adjustLocked(byte[] address, Amount delta) {
        ByteArray k = getKey(TYPE_ACCOUNT, address);
        Account acc = getAccount(address);
        acc.setLocked(sum(acc.getLocked(), delta));
        updates.put(k, acc.toBytes());
    }

    // contract code/storage support is a placeholder: all four methods throw
    @Override
    public void getCode(byte[] address) {
        throw new UnsupportedOperationException("getCode() is not yet supported");
    }

    @Override
    public void setCode(byte[] address, byte[] code) {
        throw new UnsupportedOperationException("setCode() is not yet supported");
    }

    @Override
    public byte[] getStorage(byte[] address, byte[] key) {
        throw new UnsupportedOperationException("getStorage() is not yet supported");
    }

    @Override
    public void putStorage(byte[] address, byte[] key, byte[] value) {
        throw new UnsupportedOperationException("putStorage() is not yet supported");
    }

    @Override
    public void removeStorage(byte[] address, byte[] key) {
        // fixed duplicated word in the message ("not yet yet supported")
        throw new UnsupportedOperationException("removeStorage() is not yet supported");
    }

    /** Creates a child state layered on top of this one. */
    @Override
    public AccountState track() {
        return new AccountStateImpl(this);
    }

    /**
     * Flushes buffered updates: to the database if this is the root state,
     * otherwise into the parent's buffer. The buffer is cleared afterwards.
     */
    @Override
    public void commit() {
        // NOTE(review): synchronizing on a ConcurrentHashMap only serializes
        // commit/rollback against each other, not against concurrent put()
        // calls from the mutator methods above — confirm intended semantics
        synchronized (updates) {
            if (prev == null) {
                for (Map.Entry<ByteArray, byte[]> entry : updates.entrySet()) {
                    if (entry.getValue() == null) {
                        accountDB.delete(entry.getKey().getData());
                    } else {
                        accountDB.put(entry.getKey().getData(), entry.getValue());
                    }
                }
            } else {
                for (Entry<ByteArray, byte[]> e : updates.entrySet()) {
                    prev.updates.put(e.getKey(), e.getValue());
                }
            }
            updates.clear();
        }
    }

    /** Discards all buffered updates. */
    @Override
    public void rollback() {
        updates.clear();
    }

    /** Builds a DB key: [type, address]. */
    protected ByteArray getKey(byte type, byte[] address) {
        return ByteArray.of(Bytes.merge(type, address));
    }

    /** Builds a storage DB key: [TYPE_STORAGE, address, key]. */
    protected ByteArray getStorageKey(byte[] address, byte[] key) {
        byte[] buf = new byte[1 + address.length + key.length];
        buf[0] = TYPE_STORAGE;
        System.arraycopy(address, 0, buf, 1, address.length);
        System.arraycopy(key, 0, buf, 1 + address.length, key.length);
        return ByteArray.of(buf);
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.util;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.List;
/**
 * Small I/O helpers for reading and writing streams and files.
 */
public class IOUtil {

    // chunk size for buffered stream reads
    private static final int BUFFER_SIZE = 8192;

    /**
     * Reads the given input stream into byte array.
     *
     * The stream is read in chunks (rather than byte-by-byte as before) and
     * is NOT closed by this method; that remains the caller's responsibility.
     *
     * @param in
     *            the stream to drain
     * @return all remaining bytes of the stream
     * @throws IOException
     */
    public static byte[] readStream(InputStream in) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        byte[] chunk = new byte[BUFFER_SIZE];
        for (int n; (n = in.read(chunk)) != -1;) {
            buf.write(chunk, 0, n);
        }
        return buf.toByteArray();
    }

    /**
     * Reads the given input stream as a string.
     *
     * @param in
     *            the stream to drain
     * @return the stream contents decoded via {@link Bytes#toString(byte[])}
     * @throws IOException
     */
    public static String readStreamAsString(InputStream in) throws IOException {
        return Bytes.toString(readStream(in));
    }

    /**
     * Reads file as a byte array.
     *
     * @param file
     *            The file to read
     * @return a byte array; empty array if the file does not exist
     * @throws IOException
     */
    public static byte[] readFile(File file) throws IOException {
        if (file.exists()) {
            // delegate to readStream instead of duplicating the read loop
            try (InputStream in = new FileInputStream(file)) {
                return readStream(in);
            }
        } else {
            return Bytes.EMPTY_BYTES;
        }
    }

    /**
     * Reads file as a string.
     *
     * @param file
     *            File to read
     * @return The file content as string; empty if the file does not exist
     * @throws IOException
     */
    public static String readFileAsString(File file) throws IOException {
        return Bytes.toString(readFile(file));
    }

    /**
     * Writes a byte array into a file, replacing any existing content.
     *
     * @param bytes
     *            A byte array
     * @param file
     *            Destination file
     * @throws IOException
     */
    public static void writeToFile(byte[] bytes, File file) throws IOException {
        try (BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream(file))) {
            out.write(bytes);
        }
    }

    /**
     * Writes a {@link String} into a file.
     *
     * @param str
     *            String contents
     * @param file
     *            Destination file
     * @throws IOException
     */
    public static void writeToFile(String str, File file) throws IOException {
        writeToFile(Bytes.of(str), file);
    }

    /**
     * Reads file by lines.
     *
     * @param file
     *            The file to read
     * @param charset
     *            Charset used to decode the file
     * @return A list of lines in the file, or empty if the file doesn't exist
     * @throws IOException
     */
    public static List<String> readFileAsLines(File file, Charset charset) throws IOException {
        List<String> lines = new ArrayList<>();
        if (file.isFile()) {
            try (BufferedReader in = new BufferedReader(new InputStreamReader(new FileInputStream(file), charset))) {
                for (String line; (line = in.readLine()) != null;) {
                    lines.add(line);
                }
            }
        }
        return lines;
    }

    /**
     * Copy a file to a new location.
     *
     * @param src
     *            Source file
     * @param dst
     *            Destination file
     * @param replaceExisting
     *            Whether to replace existing file
     * @throws IOException
     */
    public static void copyFile(File src, File dst, boolean replaceExisting) throws IOException {
        if (replaceExisting || !dst.exists()) {
            Files.copy(src.toPath(), dst.toPath(), StandardCopyOption.REPLACE_EXISTING);
        }
    }

    // static utility class; not instantiable
    private IOUtil() {
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.gui;
import java.awt.BorderLayout;
import java.awt.Dimension;
/**
 * Minimal test application that hosts a {@link StatusBar} docked at the
 * bottom of the frame.
 */
public class StatusBarTestApplication extends BaseTestApplication {

    private static final long serialVersionUID = 1L;

    protected StatusBar statusBar;

    StatusBarTestApplication() {
        super();
        this.setMinimumSize(new Dimension(960, 600));
        statusBar = new StatusBar(this);
        // Add the status bar exactly once, anchored SOUTH. The previous code
        // additionally called getContentPane().add(statusBar), which re-added
        // the same component with default (CENTER) constraints — a Swing
        // component can only occupy one slot, so that second add silently
        // undid the SOUTH placement.
        this.add(statusBar, BorderLayout.SOUTH);
    }
}<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.gui;
import javax.swing.text.JTextComponent;
/**
* Placeholder of a Swing text component based on ${@link TextPrompt}.
*/
public class PlaceHolder extends TextPrompt {

    private static final long serialVersionUID = -1350764114359129512L;

    /**
     * Attaches a placeholder with the given text to a text component.
     *
     * @param text
     *            the placeholder text to display
     * @param component
     *            the text component to decorate
     */
    public PlaceHolder(String text, JTextComponent component) {
        super(text, component);
        // render at half opacity so the prompt is visually distinct from input
        changeAlpha(0.5f);
    }
}<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.gui.dialog;
import java.util.ArrayList;
import java.util.List;
import org.assertj.swing.edt.GuiActionRunner;
import org.assertj.swing.fixture.FrameFixture;
import org.assertj.swing.junit.testcase.AssertJSwingJUnitTestCase;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import org.semux.crypto.Key;
import org.semux.gui.AddressBookEntry;
import org.semux.gui.model.WalletModel;
import org.semux.rules.KernelRule;
@RunWith(MockitoJUnitRunner.class)
public class AddressBookDialogTest extends AssertJSwingJUnitTestCase {

    @Rule
    public KernelRule kernelRule1 = new KernelRule(51610, 51710);

    // the wallet model is a plain mock; the dialog only needs the entries
    @Mock
    WalletModel walletModel;

    /**
     * Opens the address book dialog pre-populated with two entries and
     * verifies the table shows both (name, address) rows.
     */
    @Test
    public void testListAddressBook() {
        Key account1 = new Key(), account2 = new Key();
        List<AddressBookEntry> entries = new ArrayList<>();
        entries.add(new AddressBookEntry("address1", account1.toAddressString()));
        entries.add(new AddressBookEntry("address2", account2.toAddressString()));
        // GUI components must be created on the EDT, hence GuiActionRunner
        AddressBookDialogTestApplication application = GuiActionRunner
                .execute(() -> new AddressBookDialogTestApplication(walletModel, kernelRule1.getKernel(), entries));
        FrameFixture window = new FrameFixture(robot(), application);
        window.show().requireVisible().moveToFront();
        window.dialog("AddressBookDialog").requireVisible()
                .table().requireVisible().requireRowCount(2).requireContents(new String[][] {
                        { "address1", account1.toAddressString() },
                        { "address2", account2.toAddressString() }
                });
    }

    @Override
    protected void onSetUp() {
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.core;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.util.Arrays;
import java.util.List;
import org.junit.Test;
import org.semux.util.Bytes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Tests for {@link TransactionResult}: construction, round-trip
 * serialization, and a logged size estimate.
 */
public class TransactionResultTest {

    private static final Logger logger = LoggerFactory.getLogger(TransactionResultTest.class);

    // shared fixture: a successful result with random return data and two logs
    private boolean valid = true;
    private byte[] returns = Bytes.random(20);
    private List<byte[]> logs = Arrays.asList(Bytes.random(8), Bytes.random(8));

    @Test
    public void testNew() {
        TransactionResult res = new TransactionResult(valid, returns, logs);
        assertTrue(res.validate());
        testFields(res);
    }

    @Test
    public void testSerialization() {
        // all fields must survive an encode/decode round trip
        TransactionResult res = new TransactionResult(valid, returns, logs);
        testFields(TransactionResult.fromBytes(res.toBytes()));
    }

    @Test
    public void testTransactionResultSize() {
        // informational only: logs the serialized size and an extrapolated
        // storage cost per one million transactions
        TransactionResult res = new TransactionResult(valid, returns, logs);
        byte[] bytes = res.toBytes();
        logger.info("result size: {} B, {} GB per 1M txs", bytes.length, 1000000.0 * bytes.length / 1024 / 1024 / 1024);
    }

    // asserts that every field of the result matches the shared fixture
    private void testFields(TransactionResult res) {
        assertEquals(valid, res.isSuccess());
        assertArrayEquals(returns, res.getReturns());
        assertEquals(logs.size(), res.getLogs().size());
        for (int i = 0; i < logs.size(); i++) {
            assertArrayEquals(logs.get(i), res.getLogs().get(i));
        }
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.net.msg;
/**
* Utility that keeps track of the number of retries and lastTimestamp.
*
*/
public class MessageWrapper {

    private final Message message;

    // wall-clock time of the most recent send attempt
    private long lastTimestamp;

    // number of retransmissions so far
    private int retries;

    // set once a response for this message has been observed
    private boolean answered;

    /**
     * Create a message round trip.
     *
     * @param message
     *            the message to track
     */
    public MessageWrapper(Message message) {
        this.message = message;
        saveTime();
    }

    /** Records the current time as the latest send attempt. */
    public void saveTime() {
        lastTimestamp = System.currentTimeMillis();
    }

    /** Marks this message as answered. */
    public void answer() {
        answered = true;
    }

    /** Bumps the retry counter by one. */
    public void increaseRetries() {
        retries = retries + 1;
    }

    public Message getMessage() {
        return message;
    }

    public long getLastTimestamp() {
        return lastTimestamp;
    }

    public int getRetries() {
        return retries;
    }

    public boolean isAnswered() {
        return answered;
    }
}<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.util;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertThat;
import static org.semux.core.Amount.ZERO;
import static org.semux.core.Amount.Unit.SEM;
import java.util.Arrays;
import java.util.Collections;
import org.junit.Test;
import org.semux.Network;
import org.semux.core.Amount;
import org.semux.core.Transaction;
import org.semux.core.TransactionResult;
import org.semux.core.TransactionType;
import org.semux.crypto.Hash;
import org.semux.crypto.Key;
/**
 * Tests that {@link MerkleUtil}'s root helpers agree with building a
 * {@link MerkleTree} over the element hashes directly.
 */
public class MerkleUtilTest {

    @Test
    public void testComputeTransactionsRoot() {
        // two transfer transactions with identical payloads but different
        // signing keys
        Network network = Network.DEVNET;
        TransactionType type = TransactionType.TRANSFER;
        byte[] to = Bytes.random(20);
        Amount value = SEM.of(1);
        Amount fee = ZERO;
        long nonce = 1;
        long timestamp = System.currentTimeMillis();
        byte[] data = Bytes.random(128);
        Transaction tx1 = new Transaction(network, type, to, value, fee, nonce, timestamp, data).sign(new Key());
        Transaction tx2 = new Transaction(network, type, to, value, fee, nonce, timestamp, data).sign(new Key());
        // expected root: a Merkle tree built over the transaction hashes
        byte[] b1 = tx1.getHash();
        byte[] b2 = tx2.getHash();
        byte[] root = new MerkleTree(Arrays.asList(b1, b2)).getRootHash();
        byte[] merkle = MerkleUtil.computeTransactionsRoot(Arrays.asList(tx1, tx2));
        assertThat(merkle, equalTo(root));
    }

    @Test
    public void testComputeResultsRoot() {
        TransactionResult res1 = new TransactionResult(true, Bytes.random(20), Collections.emptyList());
        TransactionResult res2 = new TransactionResult(false, Bytes.random(20), Collections.emptyList());
        // expected root: a Merkle tree over the h256 of each encoded result
        byte[] b1 = Hash.h256(res1.toBytes());
        byte[] b2 = Hash.h256(res2.toBytes());
        byte[] root = new MerkleTree(Arrays.asList(b1, b2)).getRootHash();
        byte[] merkle = MerkleUtil.computeResultsRoot(Arrays.asList(res1, res2));
        assertThat(merkle, equalTo(root));
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.api;
import static com.fasterxml.jackson.annotation.JsonInclude.Include.NON_NULL;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
* ApiHandlerResponse is the base class of Semux API responses
*/
public class ApiHandlerResponse {

    // whether the request succeeded; always serialized
    @JsonProperty(value = "success", required = true)
    public final Boolean success;

    // optional human-readable message; omitted from the JSON output when null
    @JsonProperty("message")
    @JsonInclude(NON_NULL)
    public String message;

    /**
     * Creates a response.
     *
     * @param success
     *            whether the request succeeded
     * @param message
     *            optional message; may be null
     */
    @JsonCreator
    public ApiHandlerResponse(
            @JsonProperty(value = "success", required = true) Boolean success,
            @JsonProperty("message") String message) {
        this.success = success;
        this.message = message;
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.core;
import static java.math.RoundingMode.FLOOR;
import static java.util.Arrays.stream;
import java.math.BigDecimal;
import java.math.BigInteger;
/**
 * An immutable amount of currency, stored internally in nano-SEM (the base
 * unit). All arithmetic is overflow-checked and throws
 * {@link ArithmeticException} on overflow.
 */
public final class Amount implements Comparable<Amount> {

    /**
     * Metric units of SEM, from nano-SEM up to mega-SEM. Each unit knows its
     * decimal exponent relative to nano-SEM and its display symbol.
     */
    public enum Unit {
        NANO_SEM(0, "nSEM"),
        MICRO_SEM(3, "μSEM"),
        MILLI_SEM(6, "mSEM"),
        SEM(9, "SEM"),
        KILO_SEM(12, "kSEM"),
        MEGA_SEM(15, "MSEM");

        private final int exp; // decimal exponent relative to nano-SEM
        private final long factor; // 10^exp, the nano-SEM value of one of this unit
        public final String symbol;

        Unit(int exp, String symbol) {
            this.exp = exp;
            this.factor = BigInteger.TEN.pow(exp).longValueExact();
            this.symbol = symbol;
        }

        /**
         * Resolves a unit from its display symbol, e.g. {@code "SEM"}.
         *
         * @param s
         *            the unit symbol
         * @return the matching unit
         * @throws NoSuchElementException
         *             if the symbol is unknown
         */
        public static Unit ofSymbol(String s) {
            return stream(values())
                    .filter(i -> s.equals(i.symbol))
                    .findAny()
                    .orElseThrow(() -> new NoSuchElementException("Unknown unit symbol: " + s));
        }

        /**
         * Creates an amount of {@code a} of this unit.
         *
         * @param a
         *            the number of units
         * @return the amount, in nano-SEM internally
         * @throws ArithmeticException
         *             on long overflow
         */
        public Amount of(long a) {
            return new Amount(Math.multiplyExact(a, factor));
        }

        /**
         * Converts an amount to a decimal value in this unit, rounded towards
         * zero (FLOOR) at the given scale.
         *
         * @param a
         *            the amount to convert
         * @param scale
         *            number of decimal places
         * @return the decimal representation in this unit
         */
        public BigDecimal toDecimal(Amount a, int scale) {
            BigDecimal nanoValue = BigDecimal.valueOf(a.nano);
            return nanoValue.movePointLeft(exp).setScale(scale, FLOOR);
        }

        /**
         * Parses a decimal value expressed in this unit into an amount,
         * truncating (FLOOR) any fraction below one nano-SEM.
         *
         * @param d
         *            the decimal value in this unit
         * @return the corresponding amount
         * @throws ArithmeticException
         *             if the value does not fit into a long nano-SEM count
         */
        public Amount fromDecimal(BigDecimal d) {
            return new Amount(d.movePointRight(exp).setScale(0, FLOOR).longValueExact());
        }
    }

    // Value in nano-SEM, the base unit.
    private final long nano;

    /** The zero amount. */
    public static final Amount ZERO = new Amount(0);

    private Amount(long nano) {
        this.nano = nano;
    }

    /**
     * Returns this amount in nano-SEM.
     *
     * @return the nano-SEM value
     */
    public long getNano() {
        return nano;
    }

    @Override
    public int compareTo(Amount other) {
        // Equivalent to the previous lt/gt ternary, but branch-free and
        // backed by the standard library.
        return Long.compare(nano, other.nano);
    }

    @Override
    public int hashCode() {
        return Long.hashCode(nano);
    }

    @Override
    public boolean equals(Object other) {
        return other instanceof Amount && ((Amount) other).nano == nano;
    }

    @Override
    public String toString() {
        return Unit.SEM.toDecimal(this, 9).stripTrailingZeros().toPlainString() + " SEM";
    }

    public boolean gt(Amount other) {
        return nano > other.nano;
    }

    public boolean gte(Amount other) {
        return nano >= other.nano;
    }

    public boolean gt0() {
        return gt(ZERO);
    }

    public boolean gte0() {
        return gte(ZERO);
    }

    public boolean lt(Amount other) {
        return nano < other.nano;
    }

    public boolean lte(Amount other) {
        return nano <= other.nano;
    }

    public boolean lt0() {
        return lt(ZERO);
    }

    public boolean lte0() {
        return lte(ZERO);
    }

    /**
     * Returns the negation of an amount.
     *
     * @throws ArithmeticException
     *             on overflow (negating Long.MIN_VALUE)
     */
    public static Amount neg(Amount a) {
        return new Amount(Math.negateExact(a.nano));
    }

    /**
     * Returns the sum of two amounts.
     *
     * @throws ArithmeticException
     *             on long overflow
     */
    public static Amount sum(Amount a1, Amount a2) {
        return new Amount(Math.addExact(a1.nano, a2.nano));
    }

    /**
     * Returns the difference {@code a1 - a2}.
     *
     * @throws ArithmeticException
     *             on long overflow
     */
    public static Amount sub(Amount a1, Amount a2) {
        return new Amount(Math.subtractExact(a1.nano, a2.nano));
    }
}
<file_sep>### What is a delegate
**Delegates** are special accounts on the **Semux BFT** consensus. Delegates are accounts that are available for voting and could become a validator. To register as a delegate you need to have `1000 SEMs` + transaction fee.
### How to register as a delegate
**To become a delegate**
1. Make sure you have enough balance (1000 SEMs + transaction fee);
2. Click on the `Delegates` tab;
3. In the middle right side you can see a bar **below** `Unvote`;
4. Type the **name** you want your delegate to appear as;
5. Click `Register as delegate`.
### What is validator
**Validators** are delegates who are allowed to forge/mine blocks and validate transactions for the BFT protocol. To become a `Validator`, a `Delegate` needs to have enough votes to make it into the `Top 100` of the list. **Validators** are indicated with the **V** symbol, compared to the **S** symbol for other delegates.
### How to become a validator
**To become a validator**
1. Make sure to register as delegate
2. Vote for your own delegate
1. Click on the `Delegates` tab;
2. On the right side type the number of votes you want to put for your delegate;
3. Click on your delegate;
4. Click Vote (note: votes will remain locked until you `unvote`).
3. Wait for others to vote for your own delegate
### Number of validators
At the start of the network there will be **16 validator slots**. The `Top 16 validators` with the most votes will automatically become validators. Delegates can add more votes to remain in the `Top 16`. The number of validators will increase by **1 every 2 hours** until the **maximum of 100 validators** is reached.
### Recommended validator setup
A validator needs to be backed by a powerful computer.
**Minimum Setup**
* 8GB Memory
* Dual Core CPU
* 100 Mbps Bandwidth
**Recommended Setup**
* 16GB Memory
* Quad Core CPU
* 200 Mbps Bandwidth
Note: bandwidth requirements are for both inbound and outbound traffic.<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.core;
import static junit.framework.TestCase.assertTrue;
import static org.assertj.core.api.AssertionsForClassTypes.assertThat;
import static org.junit.Assert.assertFalse;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.withSettings;
import org.junit.BeforeClass;
import org.junit.Test;
import org.powermock.reflect.Whitebox;
import org.semux.consensus.ValidatorActivatedFork;
import org.semux.crypto.Hex;
/**
 * Tests encoding/decoding of the versioned BlockHeaderData, including the v1
 * fork-signal set and the pass-through handling of unrecognized versions.
 */
public class BlockHeaderDataTest {
// Fixtures: 8 mocked pending forks numbered 1..8 (populated in beforeClass).
private static final ValidatorActivatedFork[] eightPendingForks = new ValidatorActivatedFork[8];
// v1 encoding with 8 pending forks:
// version(0x01) + size(0x11=17) + count(0x08) + 8 shorts (fork numbers 1..8).
private static final byte[] v1_8xPendingForksEncoded = Hex.decode("01110800010002000300040005000600070008");
// v1 encoding with no pending forks: version(0x01) + size(0x01) + count(0x00).
private static final byte[] v1_0xPendingForkEncoded = Hex.decode("010100");
private static final ValidatorActivatedFork[] onePendingFork = new ValidatorActivatedFork[1];
// v1 encoding with one pending fork: version + size(3) + count(1) + one short.
private static final byte[] v1_1xPendingForkEncoded = Hex.decode("0103010001");
// 30 bytes of opaque reserved data for an unrecognized header version.
private static final byte[] unrecognized_reservedDataEncoded = Hex
.decode("f1a253afc2ae97cd1a562d8a829f26fa876bb48e264fdfb1d18df3c84271");
// Same reserved data prefixed with version 0xff and size 0x1e (30).
private static final byte[] unrecognized_headerDataEncoded = Hex
.decode("ff1ef1a253afc2ae97cd1a562d8a829f26fa876bb48e264fdfb1d18df3c84271");
@BeforeClass
public static void beforeClass() {
// Build mocked forks with internal "number" fields set to 1..8 so the
// encoded shorts match v1_8xPendingForksEncoded above.
for (short i = 1; i <= 8; i++) {
ValidatorActivatedFork a = mock(ValidatorActivatedFork.class);
Whitebox.setInternalState(a, "number", i);
eightPendingForks[i - 1] = a;
}
onePendingFork[0] = ValidatorActivatedFork.UNIFORM_DISTRIBUTION;
}
// v0 headers carry no data at all; null/empty input decodes to v0.
@Test
public void testV0HeaderData() {
BlockHeaderData blockHeaderData = BlockHeaderData.v0();
assertThat(blockHeaderData.toBytes()).hasSize(0);
assertFalse(blockHeaderData.signalingFork(ValidatorActivatedFork.UNIFORM_DISTRIBUTION));
assertThat(blockHeaderData.version).isEqualTo((byte) 0x00);
assertThat(BlockHeaderData.fromBytes(null).version).isEqualTo((byte) 0x00);
assertThat(BlockHeaderData.fromBytes(new byte[0]).version).isEqualTo((byte) 0x00);
}
@Test
public void testV1HeaderDataEncoding() {
// one pending fork
BlockHeaderData blockHeaderData = BlockHeaderData.v1(new BlockHeaderData.ForkSignalSet(onePendingFork));
assertThat(blockHeaderData.version).isEqualTo((byte) 0x01);
assertThat(blockHeaderData.toBytes()).isEqualTo(v1_1xPendingForkEncoded).hasSize(5); // writeByte(1) +
// writeSize(1) +
// writeByte(1) +
// writeShort(2)
// zero pending fork
blockHeaderData = BlockHeaderData.v1(new BlockHeaderData.ForkSignalSet());
assertThat(blockHeaderData.version).isEqualTo((byte) 0x01);
assertThat(blockHeaderData.toBytes()).isEqualTo(v1_0xPendingForkEncoded).hasSize(3); // writeByte(1) +
// writeSize(1) +
// writeByte(1)
// eight pending forks
blockHeaderData = BlockHeaderData.v1(new BlockHeaderData.ForkSignalSet(eightPendingForks));
assertThat(blockHeaderData.version).isEqualTo((byte) 0x01);
assertThat(blockHeaderData.toBytes()).isEqualTo(v1_8xPendingForksEncoded).hasSize(19); // writeByte(1) +
// writeSize(1) +
// writeByte(1) +
// writeShort(2) * 8
}
// Decoding is the mirror image of the encoding cases above.
@Test
public void testV1HeaderDataDecoding() {
// one pending fork
BlockHeaderData blockHeaderData = BlockHeaderData.fromBytes(v1_1xPendingForkEncoded);
assertThat(blockHeaderData.version).isEqualTo((byte) 0x01);
assertTrue(blockHeaderData.signalingFork(onePendingFork[0]));
// zero pending fork
blockHeaderData = BlockHeaderData.fromBytes(v1_0xPendingForkEncoded);
assertThat(blockHeaderData.version).isEqualTo((byte) 0x01);
assertFalse(blockHeaderData.signalingFork(onePendingFork[0]));
// eight pending forks
blockHeaderData = BlockHeaderData.fromBytes(v1_8xPendingForksEncoded);
assertThat(blockHeaderData.version).isEqualTo((byte) 0x01);
for (ValidatorActivatedFork f : eightPendingForks) {
assertTrue(blockHeaderData.signalingFork(f));
}
}
// An unknown version must round-trip its reserved bytes untouched and never
// report a signaled fork.
@Test
public void testUnrecognizedHeaderDataEncoding() {
BlockHeaderData blockHeaderData = mock(BlockHeaderData.class, withSettings()
.useConstructor((byte) 0xff, unrecognized_reservedDataEncoded));
when(blockHeaderData.toBytes()).thenCallRealMethod();
assertThat(blockHeaderData.toBytes()).isEqualTo(unrecognized_headerDataEncoded).hasSize(32);
assertFalse(blockHeaderData.signalingFork(ValidatorActivatedFork.UNIFORM_DISTRIBUTION));
}
@Test
public void testUnrecognizedHeaderDataDecoding() {
BlockHeaderData blockHeaderData = BlockHeaderData.fromBytes(unrecognized_headerDataEncoded);
assertThat(blockHeaderData.version).isEqualTo((byte) 0xff);
assertThat((byte[]) Whitebox.getInternalState(blockHeaderData, "reserved"))
.isEqualTo(unrecognized_reservedDataEncoded);
}
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.net;
import java.io.File;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import org.semux.Kernel;
import org.semux.net.filter.SemuxIpFilter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Channel Manager.
*
*/
public class ChannelManager {
private static final Logger logger = LoggerFactory.getLogger(ChannelManager.class);
/**
* All channels, indexed by the <code>remoteAddress (ip + port)</code>, not
* necessarily the listening address.
*/
protected ConcurrentHashMap<InetSocketAddress, Channel> channels = new ConcurrentHashMap<>();
protected ConcurrentHashMap<String, Channel> activeChannels = new ConcurrentHashMap<>();
protected final SemuxIpFilter ipFilter;
public ChannelManager(Kernel kernel) {
ipFilter = new SemuxIpFilter.Loader()
.load(new File(kernel.getConfig().configDir(), SemuxIpFilter.CONFIG_FILE).toPath());
}
/**
* Returns the IP filter if enabled.
*
* @return
*/
public SemuxIpFilter getIpFilter() {
return ipFilter;
}
/**
* Returns whether a connection from the given address is acceptable or not.
*
* @param address
* @return
*/
public boolean isAcceptable(InetSocketAddress address) {
return ipFilter == null || ipFilter.isAcceptable(address);
}
/**
* Returns whether a socket address is connected.
*
* @param address
* @return
*/
public boolean isConnected(InetSocketAddress address) {
return channels.containsKey(address);
}
/**
* Returns whether the specified IP is connected.
*
* @param ip
* @return
*/
public boolean isActiveIP(String ip) {
for (Channel c : activeChannels.values()) {
if (c.getRemoteIp().equals(ip)) {
return true;
}
}
return false;
}
/**
* Returns whether the specified peer is connected.
*
* @param peerId
* @return
*/
public boolean isActivePeer(String peerId) {
return activeChannels.containsKey(peerId);
}
/**
* Returns the number of channels.
*
* @return
*/
public int size() {
return channels.size();
}
/**
* Adds a new channel to this manager.
*
* @param ch
* channel instance
*/
public void add(Channel ch) {
logger.debug("Channel added: remoteAddress = {}:{}", ch.getRemoteIp(), ch.getRemotePort());
channels.put(ch.getRemoteAddress(), ch);
}
/**
* Removes a disconnected channel from this manager.
*
* @param ch
* channel instance
*/
public void remove(Channel ch) {
logger.debug("Channel removed: remoteAddress = {}:{}", ch.getRemoteIp(), ch.getRemotePort());
channels.remove(ch.getRemoteAddress());
if (ch.isActive()) {
activeChannels.remove(ch.getRemotePeer().getPeerId());
ch.onInactive();
}
}
/**
* Remove blacklisted channels.
*/
public void removeBlacklistedChannels() {
for (Map.Entry<InetSocketAddress, Channel> channelEntry : channels.entrySet()) {
if (!isAcceptable(channelEntry.getValue().getRemoteAddress())) {
remove(channelEntry.getValue());
}
}
}
/**
* When a channel becomes active.
*
* @param channel
* @param peer
*/
public void onChannelActive(Channel channel, Peer peer) {
channel.onActive(peer);
activeChannels.put(peer.getPeerId(), channel);
}
/**
* Returns a copy of the active peers.
*
* @return
*/
public List<Peer> getActivePeers() {
List<Peer> list = new ArrayList<>();
for (Channel c : activeChannels.values()) {
list.add(c.getRemotePeer());
}
return list;
}
/**
* Returns the listening IP addresses of active peers.
*
* @return
*/
public Set<InetSocketAddress> getActiveAddresses() {
Set<InetSocketAddress> set = new HashSet<>();
for (Channel c : activeChannels.values()) {
Peer p = c.getRemotePeer();
set.add(new InetSocketAddress(p.getIp(), p.getPort()));
}
return set;
}
/**
* Returns the active channels.
*
* @return
*/
public List<Channel> getActiveChannels() {
List<Channel> list = new ArrayList<>();
list.addAll(activeChannels.values());
return list;
}
/**
* Returns the active channels, filtered by peerId.
*
* @param peerIds
* peerId filter
* @return
*/
public List<Channel> getActiveChannels(List<String> peerIds) {
List<Channel> list = new ArrayList<>();
for (String peerId : peerIds) {
if (activeChannels.containsKey(peerId)) {
list.add(activeChannels.get(peerId));
}
}
return list;
}
/**
* Returns the active channels, whose message queue is idle.
*
* @return
*/
public List<Channel> getIdleChannels() {
List<Channel> list = new ArrayList<>();
for (Channel c : activeChannels.values()) {
if (c.getMessageQueue().isIdle()) {
list.add(c);
}
}
return list;
}
}<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.message;
import java.io.File;
import java.io.IOException;
import java.util.Collection;
import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.io.FileUtils;
import org.semux.util.Bytes;
/**
 * Build-time helper: scans the main sources for {@code Messages.get("...")}
 * keys and reports any key missing from the bundled messages.properties
 * files. Prints the total number of keys found.
 */
public class Verifier {
    public static void main(String[] args) throws IOException {
        Properties props = new Properties();
        // try-with-resources: the original leaked both resource streams, since
        // Properties.load does not close its input.
        try (java.io.InputStream gui = Verifier.class.getResourceAsStream("/org/semux/gui/messages.properties");
                java.io.InputStream cli = Verifier.class.getResourceAsStream("/org/semux/cli/messages.properties")) {
            props.load(gui);
            props.load(cli);
        }

        Collection<File> files = FileUtils.listFiles(new File("src/main/java/org/semux"), new String[] { "java" },
                true);

        // Compile the pattern once, outside the per-file loop.
        Pattern p = Pattern.compile("Messages.get\\(\"(.+?)\"");

        int n = 0; // total number of Messages.get keys found across all files
        for (File file : files) {
            String content = FileUtils.readFileToString(file, Bytes.CHARSET);
            Matcher m = p.matcher(content);
            while (m.find()) {
                n++;
                if (!props.containsKey(m.group(1))) {
                    System.err.println(m.group(1) + " is not in the messages.properties!");
                }
            }
        }
        System.out.println(n);
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.net;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import java.util.Collection;
import java.util.Deque;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ConcurrentLinkedDeque;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Stream;
import org.semux.Kernel;
import org.semux.Network;
import org.semux.config.Config;
import org.semux.config.Constants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
/**
 * Manages the queue of candidate peer nodes: periodically connects to queued
 * nodes and periodically refreshes the queue from DNS seed records.
 */
public class NodeManager {
private static final Logger logger = LoggerFactory.getLogger(NodeManager.class);
// Names scheduler threads "node-0", "node-1", ...
private static final ThreadFactory factory = new ThreadFactory() {
private AtomicInteger cnt = new AtomicInteger(0);
@Override
public Thread newThread(Runnable r) {
return new Thread(r, "node-" + cnt.getAndIncrement());
}
};
// Cap on the candidate queue; oldest entries are dropped when exceeded.
private static final long MAX_QUEUE_SIZE = 1024;
// Size of the LRU cache tracking the last connect attempt per node.
private static final int LRU_CACHE_SIZE = 1024;
// Minimum wait (ms) before re-attempting a connection to the same node.
private static final long RECONNECT_WAIT = 2L * 60L * 1000L;
private Kernel kernel;
private Config config;
private ChannelManager channelMgr;
private PeerClient client;
// Candidate nodes; new nodes go to the front, overflow drops from the back.
private Deque<Node> deque = new ConcurrentLinkedDeque<>();
// node -> timestamp (ms) of the last connection attempt.
private Cache<Node, Long> lastConnect = Caffeine.newBuilder().maximumSize(LRU_CACHE_SIZE).build();
private ScheduledExecutorService exec;
private ScheduledFuture<?> connectFuture;
private ScheduledFuture<?> fetchFuture;
private volatile boolean isRunning;
/**
 * Creates a node manager instance.
 *
 * @param kernel
 */
public NodeManager(Kernel kernel) {
this.kernel = kernel;
this.config = kernel.getConfig();
this.channelMgr = kernel.getChannelManager();
this.client = kernel.getClient();
this.exec = Executors.newSingleThreadScheduledExecutor(factory);
}
/**
 * Starts the node manager: seeds the queue from configuration and schedules
 * the periodic connect and fetch tasks. Idempotent while running.
 */
public synchronized void start() {
if (!isRunning) {
addNodes(config.p2pSeedNodes());
// every 0.5 seconds
connectFuture = exec.scheduleAtFixedRate(this::doConnect, 100, 500, TimeUnit.MILLISECONDS);
// every 100 seconds, delayed by 5 seconds (public IP lookup)
fetchFuture = exec.scheduleAtFixedRate(this::doFetch, 5, 100, TimeUnit.SECONDS);
isRunning = true;
logger.info("Node manager started");
}
}
/**
 * Stops this node manager. Idempotent while stopped.
 */
public synchronized void stop() {
if (isRunning) {
// NOTE(review): connectFuture is cancelled with interruption while
// fetchFuture is not — presumably so in-flight DNS lookups can finish;
// confirm this asymmetry is intentional.
connectFuture.cancel(true);
fetchFuture.cancel(false);
isRunning = false;
logger.info("Node manager stopped");
}
}
/**
 * Returns if the node manager is running or not.
 *
 * @return true if running, otherwise false
 */
public boolean isRunning() {
return isRunning;
}
/**
 * Add a node to the front of the connection queue, evicting from the back
 * when the queue exceeds MAX_QUEUE_SIZE.
 *
 * @param node
 */
public void addNode(Node node) {
deque.addFirst(node);
while (queueSize() > MAX_QUEUE_SIZE) {
deque.removeLast();
}
}
/**
 * Add a collection of nodes to the connection queue.
 *
 * @param nodes
 */
public void addNodes(Collection<Node> nodes) {
for (Node node : nodes) {
addNode(node);
}
}
/**
 * Get the connection queue size.
 *
 * @return the number of queued candidate nodes
 */
public int queueSize() {
return deque.size();
}
/**
 * Get seed nodes from DNS records.
 *
 * @param network
 *            the network to look up seeds for (mainnet/testnet only)
 * @return the resolved seed nodes; empty for other networks or on failure
 */
public Set<Node> getSeedNodes(Network network) {
Set<Node> nodes = new HashSet<>();
List<String> names;
switch (network) {
case MAINNET:
names = kernel.getConfig().netDnsSeedsMainNet();
break;
case TESTNET:
names = kernel.getConfig().netDnsSeedsTestNet();
break;
default:
return nodes;
}
// Resolve all DNS names in parallel; failed lookups are logged and skipped.
names.parallelStream()
.filter(Objects::nonNull)
.map(String::trim)
.map(name -> {
try {
return InetAddress.getAllByName(name);
} catch (UnknownHostException e) {
logger.warn("Failed to get seed nodes from {}", name);
return new InetAddress[0];
}
})
.flatMap(Stream::of)
.forEach(address -> nodes.add(new Node(address.getHostAddress(), Constants.DEFAULT_P2P_PORT)));
return nodes;
}
/**
 * Connect to a node in the queue. At most one connection is initiated per
 * invocation (note the break); nodes are skipped if they are ourselves,
 * already active, or were attempted within RECONNECT_WAIT.
 */
protected void doConnect() {
Set<InetSocketAddress> activeAddresses = channelMgr.getActiveAddresses();
Node node;
while ((node = deque.pollFirst()) != null && channelMgr.size() < config.netMaxOutboundConnections()) {
Long lastTouch = lastConnect.getIfPresent(node);
long now = System.currentTimeMillis();
if (!client.getNode().equals(node)
&& !activeAddresses.contains(node.toAddress())
&& (lastTouch == null || lastTouch + RECONNECT_WAIT < now)) {
SemuxChannelInitializer ci = new SemuxChannelInitializer(kernel, node);
client.connect(node, ci);
lastConnect.put(node, now);
break;
}
}
}
/**
 * Fetches seed nodes from DNS records or configuration.
 */
protected void doFetch() {
addNodes(getSeedNodes(config.network()));
}
/**
 * Represents a node in the semux network: an immutable wrapper around a
 * socket address with equality/hashing delegated to that address.
 */
public static class Node {
private InetSocketAddress address;
/**
 * Construct a node with the given socket address.
 *
 * @param address
 */
public Node(InetSocketAddress address) {
this.address = address;
}
/**
 * Construct a node with the given IP address and port.
 *
 * @param ip
 * @param port
 */
public Node(InetAddress ip, int port) {
this(new InetSocketAddress(ip, port));
}
/**
 * Construct a node with the given IP address and port.
 *
 * @param ip
 *            IP address, or hostname (not encouraged to use)
 * @param port
 *            port number
 */
public Node(String ip, int port) {
this(new InetSocketAddress(ip, port));
}
/**
 * Returns the IP address.
 *
 * @return the textual IP address of this node
 */
public String getIp() {
return address.getAddress().getHostAddress();
}
/**
 * Returns the port number
 *
 * @return the port number of this node
 */
public int getPort() {
return address.getPort();
}
/**
 * Converts into a socket address.
 *
 * @return the underlying socket address
 */
public InetSocketAddress toAddress() {
return address;
}
@Override
public int hashCode() {
return address.hashCode();
}
@Override
public boolean equals(Object o) {
return o instanceof Node && address.equals(((Node) o).toAddress());
}
@Override
public String toString() {
return getIp() + ":" + getPort();
}
}
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux;
import static org.awaitility.Awaitility.await;
import org.junit.Rule;
import org.junit.Test;
import org.semux.Kernel.State;
import org.semux.rules.KernelRule;
/**
 * Smoke test for the kernel lifecycle: after start(), all managers except the
 * sync manager report running; after stop(), everything reports stopped.
 */
public class KernelTest {
@Rule
public KernelRule kernelRule = new KernelRule(15160, 15170);
@Test
public void testStart() {
Kernel kernel = kernelRule.getKernel();
// start kernel
kernel.start();
// Startup is asynchronous, so poll until every subsystem is up.
// The sync manager is expected to stay idle with no peers to sync from.
await().until(() -> kernel.getNodeManager().isRunning()
&& kernel.getPendingManager().isRunning()
&& kernel.getApi().isRunning()
&& kernel.getP2p().isRunning()
&& kernel.getConsensus().isRunning()
&& !kernel.getSyncManager().isRunning());
// stop kernel
kernel.stop();
// Poll until the kernel reaches STOPPED and every subsystem is down.
await().until(() -> kernel.state == State.STOPPED
&& !kernel.getNodeManager().isRunning()
&& !kernel.getPendingManager().isRunning()
&& !kernel.getApi().isRunning()
&& !kernel.getP2p().isRunning()
&& !kernel.getConsensus().isRunning()
&& !kernel.getSyncManager().isRunning());
}
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.util;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import java.util.Arrays;
import org.junit.Test;
/**
 * Tests for the Bytes utility: randomness, concatenation, and round-trip
 * conversions for strings and primitive integer types.
 */
public class BytesTest {

    /** Two independent random draws of the same length should differ. */
    @Test
    public void testRandom() {
        int length = 20;
        byte[] first = Bytes.random(length);
        byte[] second = Bytes.random(length);
        assertEquals(length, first.length);
        assertEquals(length, second.length);
        assertFalse(Arrays.equals(first, second));
    }

    /** Merging two byte arrays concatenates them in order. */
    @Test
    public void testMerge() {
        byte[] left = Bytes.of("Hello");
        byte[] right = Bytes.of("World");
        assertEquals("HelloWorld", Bytes.toString(Bytes.merge(left, right)));
    }

    /** String -> bytes -> string round trip is lossless. */
    @Test
    public void testString() {
        byte[] original = Bytes.of("test");
        byte[] roundTripped = Bytes.of(Bytes.toString(original));
        assertArrayEquals(original, roundTripped);
    }

    /** short round trip at both boundaries and zero. */
    @Test
    public void testShort() {
        for (short value : new short[] { Short.MIN_VALUE, 0, Short.MAX_VALUE }) {
            assertEquals(value, Bytes.toShort(Bytes.of(value)));
        }
    }

    /** int round trip at both boundaries and zero. */
    @Test
    public void testInt() {
        for (int value : new int[] { Integer.MIN_VALUE, 0, Integer.MAX_VALUE }) {
            assertEquals(value, Bytes.toInt(Bytes.of(value)));
        }
    }

    /** long round trip at both boundaries and zero. */
    @Test
    public void testLong() {
        for (long value : new long[] { Long.MIN_VALUE, 0L, Long.MAX_VALUE }) {
            assertEquals(value, Bytes.toLong(Bytes.of(value)));
        }
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.net.msg.consensus;
import static org.junit.Assert.assertEquals;
import org.junit.Test;
/**
 * Round-trip serialization test for NewHeightMessage.
 */
public class NewHeightMessageTest {

    @Test
    public void testSerialization() {
        int expectedHeight = 1;
        // Encode a message, decode it from its own bytes, and verify the
        // height survives the round trip.
        NewHeightMessage original = new NewHeightMessage(expectedHeight);
        NewHeightMessage decoded = new NewHeightMessage(original.getEncoded());
        assertEquals(expectedHeight, decoded.getHeight());
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.db;
import java.io.IOException;
import java.nio.file.Path;
/**
 * Factory for named key-value databases, owning their shared resources and
 * data directory.
 */
public interface DatabaseFactory {
/**
 * Returns a KVDB instance for the specified database.
 *
 * @param name
 *            the database to open or retrieve
 * @return the database instance
 */
Database getDB(DatabaseName name);
/**
 * Open resources.
 */
void open() throws IOException;
/**
 * Close all opened resources.
 */
void close();
/**
 * Returns the data directory of created databases.
 *
 * @return the data directory path
 */
Path getDataDir();
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.util;
/**
 * Static string helpers.
 */
public class StringUtil {

    private StringUtil() {
        // utility class; not instantiable
    }

    /**
     * Returns if the given string is null or empty.
     *
     * @param str
     *            the string to check, may be null
     * @return true when {@code str} is null or has zero length
     */
    public static boolean isNullOrEmpty(String str) {
        return str == null || str.length() == 0;
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.api.response;
import java.util.List;
import java.util.stream.Collectors;
import org.semux.Kernel;
import org.semux.core.Amount;
import org.semux.core.Block;
import org.semux.core.BlockchainImpl;
import org.semux.core.Transaction;
import org.semux.core.state.Account;
import org.semux.core.state.Delegate;
import org.semux.crypto.Hex;
import org.semux.net.Peer;
import org.semux.util.TimeUtil;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
public class Types {
/**
 * JSON view of an account.
 */
public static class AccountType {
public final String address;
// Balances and nonce; the Amount fields are converted to long via
// encodeAmount (defined elsewhere in this class — presumably nano-SEM,
// confirm against the encodeAmount implementation).
public final long available;
public final long locked;
public final long nonce;
public final int transactionCount;
@JsonCreator
public AccountType(
@JsonProperty("address") String address,
@JsonProperty("available") long available,
@JsonProperty("locked") long locked,
@JsonProperty("nonce") long nonce,
@JsonProperty("transactionCount") int transactionCount) {
this.address = address;
this.available = available;
this.locked = locked;
this.nonce = nonce;
this.transactionCount = transactionCount;
}
// Convenience constructor from the core Account state object.
public AccountType(Account account, int transactionCount) {
this(Hex.encode0x(account.getAddress()),
encodeAmount(account.getAvailable()),
encodeAmount(account.getLocked()),
account.getNonce(),
transactionCount);
}
}
/**
 * JSON view of a block, including its transactions.
 */
public static class BlockType {
@JsonProperty("hash")
public final String hash;
@JsonProperty("number")
public final Long number;
@JsonProperty("view")
public final Integer view;
@JsonProperty("coinbase")
public final String coinbase;
@JsonProperty("parentHash")
public final String parentHash;
@JsonProperty("timestamp")
public final Long timestamp;
// Human-readable rendering of timestamp (via TimeUtil.formatTimestamp).
@JsonProperty("date")
public final String date;
@JsonProperty("transactionsRoot")
public final String transactionsRoot;
@JsonProperty("resultsRoot")
public final String resultsRoot;
@JsonProperty("stateRoot")
public final String stateRoot;
@JsonProperty("data")
public final String data;
@JsonProperty("transactions")
public final List<Types.TransactionType> transactions;
public BlockType(
@JsonProperty("hash") String hash,
@JsonProperty("number") Long number,
@JsonProperty("view") Integer view,
@JsonProperty("coinbase") String coinbase,
@JsonProperty("parentHash") String parentHash,
@JsonProperty("timestamp") Long timestamp,
@JsonProperty("date") String date,
@JsonProperty("transactionsRoot") String transactionsRoot,
@JsonProperty("resultsRoot") String resultsRoot,
@JsonProperty("stateRoot") String stateRoot,
@JsonProperty("data") String data,
@JsonProperty("transactions") List<Types.TransactionType> transactions) {
this.hash = hash;
this.number = number;
this.view = view;
this.coinbase = coinbase;
this.parentHash = parentHash;
this.timestamp = timestamp;
this.date = date;
this.transactionsRoot = transactionsRoot;
this.resultsRoot = resultsRoot;
this.stateRoot = stateRoot;
this.data = data;
this.transactions = transactions;
}
// Convenience constructor from the core Block object: byte[] fields are
// hex-encoded with a 0x prefix, and transactions are wrapped recursively.
public BlockType(Block block) {
this(Hex.encode0x(block.getHash()),
block.getNumber(),
block.getView(),
Hex.encode0x(block.getCoinbase()),
Hex.encode0x(block.getParentHash()),
block.getTimestamp(),
TimeUtil.formatTimestamp(block.getTimestamp()),
Hex.encode0x(block.getTransactionsRoot()),
Hex.encode0x(block.getResultsRoot()),
Hex.encode0x(block.getStateRoot()),
Hex.encode0x(block.getData()),
block.getTransactions().stream()
.map(tx -> new Types.TransactionType(block.getNumber(), tx))
.collect(Collectors.toList()));
}
}
/**
 * JSON view of a delegate, combining registration info with validator
 * statistics.
 */
public static class DelegateType {
@JsonProperty("address")
public final String address;
@JsonProperty("name")
public final String name;
@JsonProperty("registeredAt")
public final Long registeredAt;
// Vote total, converted to long via encodeAmount (defined elsewhere in
// this class).
@JsonProperty("votes")
public final Long votes;
@JsonProperty("blocksForged")
public final Long blocksForged;
@JsonProperty("turnsHit")
public final Long turnsHit;
@JsonProperty("turnsMissed")
public final Long turnsMissed;
// Convenience constructor from the core Delegate and its validator stats.
public DelegateType(BlockchainImpl.ValidatorStats validatorStats, Delegate delegate) {
this(Hex.PREF + delegate.getAddressString(),
delegate.getNameString(),
delegate.getRegisteredAt(),
encodeAmount(delegate.getVotes()),
validatorStats.getBlocksForged(),
validatorStats.getTurnsHit(),
validatorStats.getTurnsMissed());
}
public DelegateType(
@JsonProperty("address") String address,
@JsonProperty("name") String name,
@JsonProperty("registeredAt") Long registeredAt,
@JsonProperty("votes") Long votes,
@JsonProperty("blocksForged") Long blocksForged,
@JsonProperty("turnsHit") Long turnsHit,
@JsonProperty("turnsMissed") Long turnsMissed) {
this.address = address;
this.name = name;
this.registeredAt = registeredAt;
this.votes = votes;
this.blocksForged = blocksForged;
this.turnsHit = turnsHit;
this.turnsMissed = turnsMissed;
}
}
/**
 * JSON view of the node's overall status snapshot.
 */
public static class InfoType {
@JsonProperty("clientId")
public final String clientId;
@JsonProperty("coinbase")
public final String coinbase;
@JsonProperty("latestBlockNumber")
public final Long latestBlockNumber;
@JsonProperty("latestBlockHash")
public final String latestBlockHash;
@JsonProperty("activePeers")
public final Integer activePeers;
@JsonProperty("pendingTransactions")
public final Integer pendingTransactions;
public InfoType(
@JsonProperty("clientId") String clientId,
@JsonProperty("coinbase") String coinbase,
@JsonProperty("latestBlockNumber") Long latestBlockNumber,
@JsonProperty("latestBlockHash") String latestBlockHash,
@JsonProperty("activePeers") Integer activePeers,
@JsonProperty("pendingTransactions") Integer pendingTransactions) {
this.clientId = clientId;
this.coinbase = coinbase;
this.latestBlockNumber = latestBlockNumber;
this.latestBlockHash = latestBlockHash;
this.activePeers = activePeers;
this.pendingTransactions = pendingTransactions;
}
// Convenience constructor sampling the live kernel state.
public InfoType(Kernel kernel) {
this(kernel.getConfig().getClientId(),
Hex.PREF + kernel.getCoinbase(),
kernel.getBlockchain().getLatestBlockNumber(),
Hex.encode0x(kernel.getBlockchain().getLatestBlockHash()),
kernel.getChannelManager().getActivePeers().size(),
kernel.getPendingManager().getPendingTransactions().size());
}
}
/**
 * JSON view of a connected peer.
 */
public static class PeerType {
@JsonProperty("ip")
public final String ip;
@JsonProperty("port")
public final Integer port;
@JsonProperty("networkVersion")
public final Short networkVersion;
@JsonProperty("clientId")
public final String clientId;
@JsonProperty("peerId")
public final String peerId;
@JsonProperty("latestBlockNumber")
public final Long latestBlockNumber;
// Round-trip latency; unit not visible here (presumably milliseconds —
// confirm against Peer#getLatency).
@JsonProperty("latency")
public final Long latency;
@JsonProperty("capabilities")
public final List<String> capabilities;
public PeerType(
@JsonProperty("ip") String ip,
@JsonProperty("port") int port,
@JsonProperty("networkVersion") short networkVersion,
@JsonProperty("clientId") String clientId,
@JsonProperty("peerId") String peerId,
@JsonProperty("latestBlockNumber") long latestBlockNumber,
@JsonProperty("latency") long latency,
@JsonProperty("capabilities") List<String> capabilities) {
this.ip = ip;
this.port = port;
this.networkVersion = networkVersion;
this.clientId = clientId;
this.peerId = peerId;
this.latestBlockNumber = latestBlockNumber;
this.latency = latency;
this.capabilities = capabilities;
}
// Convenience constructor from the core Peer object; the peer id is
// prefixed with Hex.PREF ("0x" style prefix).
public PeerType(Peer peer) {
this(peer.getIp(),
peer.getPort(),
peer.getNetworkVersion(),
peer.getClientId(),
Hex.PREF + peer.getPeerId(),
peer.getLatestBlockNumber(),
peer.getLatency(),
peer.getCapabilities().toList());
}
}
/**
 * Immutable JSON view of the network's transaction limits.  Amounts are
 * exposed as {@code Long} nano units; {@code minDelegateBurnAmount} is
 * omitted from the output when null.
 */
public static class TransactionLimitsType {
    @JsonProperty("maxTransactionDataSize")
    public final Integer maxTransactionDataSize;
    @JsonProperty("minTransactionFee")
    public final Long minTransactionFee;
    @JsonProperty("minDelegateBurnAmount")
    @JsonInclude(JsonInclude.Include.NON_NULL)
    public final Long minDelegateBurnAmount;

    /**
     * Jackson creator taking already-encoded (nano-unit) amounts.
     */
    @JsonCreator
    public TransactionLimitsType(
            @JsonProperty("maxTransactionDataSize") Integer maxTransactionDataSize,
            @JsonProperty("minTransactionFee") Long minTransactionFee,
            @JsonProperty("minDelegateBurnAmount") Long minDelegateBurnAmount) {
        this.maxTransactionDataSize = maxTransactionDataSize;
        this.minTransactionFee = minTransactionFee;
        this.minDelegateBurnAmount = minDelegateBurnAmount;
    }

    /**
     * Convenience constructor taking domain {@code Amount}s; they are
     * converted to nano units via {@code encodeAmount}.
     */
    public TransactionLimitsType(
            Integer maxTransactionDataSize,
            Amount minTransactionFee,
            Amount minDelegateBurnAmount) {
        this(maxTransactionDataSize,
                encodeAmount(minTransactionFee),
                encodeAmount(minDelegateBurnAmount));
    }
}
/**
 * Immutable JSON view of a transaction: hash, endpoints and data as 0x
 * hex strings, value/fee in nano units, plus the block it belongs to.
 */
public static class TransactionType {
    @JsonProperty("blockNumber")
    public final Long blockNumber;
    @JsonProperty("hash")
    public final String hash;
    @JsonProperty("type")
    public final String type;
    @JsonProperty("from")
    public final String from;
    @JsonProperty("to")
    public final String to;
    @JsonProperty("value")
    public final Long value;
    @JsonProperty("fee")
    public final Long fee;
    @JsonProperty("nonce")
    public final Long nonce;
    @JsonProperty("timestamp")
    public final Long timestamp;
    @JsonProperty("data")
    public final String data;

    /**
     * Field-by-field constructor; the parameter annotations make it usable
     * as the Jackson creator.
     */
    public TransactionType(
            @JsonProperty("blockNumber") Long blockNumber,
            @JsonProperty("hash") String hash,
            @JsonProperty("type") String type,
            @JsonProperty("from") String from,
            @JsonProperty("to") String to,
            @JsonProperty("value") Long value,
            @JsonProperty("fee") Long fee,
            @JsonProperty("nonce") Long nonce,
            @JsonProperty("timestamp") Long timestamp,
            @JsonProperty("data") String data) {
        this.blockNumber = blockNumber;
        this.hash = hash;
        this.type = type;
        this.from = from;
        this.to = to;
        this.value = value;
        this.fee = fee;
        this.nonce = nonce;
        this.timestamp = timestamp;
        this.data = data;
    }

    /**
     * Converts a domain {@code Transaction} (plus the number of the block
     * containing it) into its API representation.
     */
    public TransactionType(Long blockNumber, Transaction tx) {
        this(blockNumber,
                Hex.encode0x(tx.getHash()),
                tx.getType().toString(),
                Hex.encode0x(tx.getFrom()),
                Hex.encode0x(tx.getTo()),
                encodeAmount(tx.getValue()),
                encodeAmount(tx.getFee()),
                tx.getNonce(),
                tx.getTimestamp(),
                Hex.encode0x(tx.getData()));
    }
}
/**
 * Converts an {@code Amount} to its nano-unit {@code Long} form.
 *
 * @param a
 *            the amount to encode, may be null
 * @return the value in nano units, or null when the input is null
 */
private static Long encodeAmount(Amount a) {
    if (a == null) {
        return null;
    }
    return a.getNano();
}
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.net.msg.consensus;
import org.semux.consensus.Vote;
import org.semux.net.msg.Message;
import org.semux.net.msg.MessageCode;
/**
 * P2P message carrying a single BFT {@code Vote}; the wire payload is the
 * vote's own byte encoding.
 */
public class VoteMessage extends Message {

    private Vote vote;

    /**
     * Wraps an existing vote for sending; the vote is serialized eagerly
     * into {@code encoded}.
     */
    public VoteMessage(Vote vote) {
        super(MessageCode.BFT_VOTE, null);
        this.vote = vote;
        this.encoded = vote.toBytes();
    }

    /**
     * Decodes a received payload back into a vote.
     */
    public VoteMessage(byte[] encoded) {
        super(MessageCode.BFT_VOTE, null);
        this.encoded = encoded;
        this.vote = Vote.fromBytes(encoded);
    }

    public Vote getVote() {
        return vote;
    }

    @Override
    public String toString() {
        return "BFTVoteMessage: " + vote;
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.net.msg.consensus;
import org.semux.net.msg.Message;
import org.semux.net.msg.MessageCode;
import org.semux.util.SimpleDecoder;
import org.semux.util.SimpleEncoder;
/**
 * P2P request for the header of the block at a given height; the declared
 * response type is {@code BlockMessage}.
 */
public class GetBlockHeaderMessage extends Message {

    private long number;

    /**
     * Builds a request for block {@code number}, encoding it eagerly.
     */
    public GetBlockHeaderMessage(long number) {
        super(MessageCode.GET_BLOCK_HEADER, BlockMessage.class);
        this.number = number;
        SimpleEncoder enc = new SimpleEncoder();
        enc.writeLong(number);
        this.encoded = enc.toBytes();
    }

    /**
     * Decodes a received request payload.
     */
    public GetBlockHeaderMessage(byte[] encoded) {
        super(MessageCode.GET_BLOCK_HEADER, BlockMessage.class);
        this.encoded = encoded;
        SimpleDecoder dec = new SimpleDecoder(encoded);
        this.number = dec.readLong();
    }

    public long getNumber() {
        return number;
    }

    @Override
    public String toString() {
        return "GetBlockHeaderMessage [number=" + number + "]";
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.net.filter;
import java.io.File;
/**
 * Shared helper for the ip-filter test classes.
 */
abstract class SemuxIpFilterTestBase {
    /**
     * Get a testing ipfilter.json from resource bundle.
     *
     * NOTE(review): the lookup goes through SemuxIpFilterLoaderTest.class
     * rather than this class; since the path starts with "/" it is
     * resolved classloader-absolutely, so any class on the same
     * classloader works -- presumably intentional, confirm.
     *
     * @param fileName
     *            resource file name under /ipfilter/
     * @return the resource wrapped as a File
     */
    protected static File getFile(String fileName) {
        return new File(SemuxIpFilterLoaderTest.class.getResource("/ipfilter/" + fileName).getFile());
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.gui.panel;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Toolkit;
import java.awt.datatransfer.Clipboard;
import java.awt.datatransfer.StringSelection;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.image.BufferedImage;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import javax.swing.GroupLayout;
import javax.swing.GroupLayout.Alignment;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import javax.swing.SwingConstants;
import javax.swing.border.LineBorder;
import javax.swing.table.AbstractTableModel;
import javax.swing.table.TableModel;
import javax.swing.table.TableRowSorter;
import org.semux.Kernel;
import org.semux.core.Wallet;
import org.semux.crypto.Hex;
import org.semux.crypto.Key;
import org.semux.gui.Action;
import org.semux.gui.SemuxGui;
import org.semux.gui.SwingUtil;
import org.semux.gui.model.WalletAccount;
import org.semux.gui.model.WalletModel;
import org.semux.message.GuiMessages;
import org.semux.util.exception.UnreachableException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.zxing.WriterException;
/**
 * GUI panel listing the wallet's accounts in a sortable table, showing a
 * QR code for the selected address, and offering buttons to copy the
 * address, create/delete accounts and open the address book.  All button
 * and model events funnel through {@link #actionPerformed(ActionEvent)}.
 */
public class ReceivePanel extends JPanel implements ActionListener {

    private static final long serialVersionUID = 1L;

    private static final Logger logger = LoggerFactory.getLogger(ReceivePanel.class);

    // Column headers for the accounts table, localized via GuiMessages.
    private static final String[] columnNames = { GuiMessages.get("Num"), GuiMessages.get("Name"),
            GuiMessages.get("Address"), GuiMessages.get("Available"), GuiMessages.get("Locked") };

    // Edge length, in pixels, of the rendered QR code image.
    private static final int QR_SIZE = 200;

    private transient final SemuxGui gui;
    private transient final WalletModel model;
    private transient final Kernel kernel;

    private final JTable table;
    private final ReceiveTableModel tableModel;

    private final JLabel qr;

    public ReceivePanel(SemuxGui gui) {
        this.gui = gui;
        this.model = gui.getModel();
        this.kernel = gui.getKernel();
        // Model changes are re-dispatched to this panel's actionPerformed.
        this.model.addListener(this);

        tableModel = new ReceiveTableModel();
        table = new JTable(tableModel);
        table.setName("accountsTable");
        table.setBackground(Color.WHITE);
        table.setFillsViewportHeight(true);
        table.setGridColor(Color.LIGHT_GRAY);
        table.setRowHeight(25);
        table.getTableHeader().setPreferredSize(new Dimension(10000, 24));
        SwingUtil.setColumnWidths(table, 600, 0.05, 0.1, 0.55, 0.15, 0.15);
        SwingUtil.setColumnAlignments(table, false, false, false, true, true);
        // Selecting a row refreshes the QR code via SELECT_ACCOUNT.
        table.getSelectionModel().addListSelectionListener(
                ev -> actionPerformed(new ActionEvent(ReceivePanel.this, 0, Action.SELECT_ACCOUNT.name())));

        // customized table sorter
        TableRowSorter<TableModel> sorter = new TableRowSorter<>(table.getModel());
        sorter.setComparator(0, SwingUtil.NUMBER_COMPARATOR);
        sorter.setComparator(3, SwingUtil.VALUE_COMPARATOR);
        sorter.setComparator(4, SwingUtil.VALUE_COMPARATOR);
        table.setRowSorter(sorter);

        JScrollPane scrollPane = new JScrollPane(table);
        scrollPane.setBorder(new LineBorder(Color.LIGHT_GRAY));

        // Placeholder icon until an account is selected.
        qr = new JLabel("");
        qr.setIcon(SwingUtil.emptyImage(QR_SIZE, QR_SIZE));
        qr.setBorder(new LineBorder(Color.LIGHT_GRAY));

        JButton btnCopyAddress = SwingUtil
                .createDefaultButton(GuiMessages.get("CopyAddress"), this, Action.COPY_ADDRESS);
        btnCopyAddress.setName("btnCopyAddress");

        JButton buttonNewAccount = SwingUtil
                .createDefaultButton(GuiMessages.get("NewAccount"), this, Action.NEW_ACCOUNT);
        buttonNewAccount.setName("buttonNewAccount");

        JButton btnDeleteAddress = SwingUtil
                .createDefaultButton(GuiMessages.get("DeleteAccount"), this, Action.DELETE_ACCOUNT);
        btnDeleteAddress.setName("btnDeleteAddress");

        JButton btnAddressBook = SwingUtil
                .createDefaultButton(GuiMessages.get("AddressBook"), this, Action.SHOW_ADDRESS_BOOK);
        btnAddressBook.setName("btnAddressBook");

        // @formatter:off
        GroupLayout groupLayout = new GroupLayout(this);
        groupLayout.setHorizontalGroup(
            groupLayout.createParallelGroup(Alignment.TRAILING)
                .addGroup(groupLayout.createSequentialGroup()
                    .addComponent(scrollPane, GroupLayout.DEFAULT_SIZE, 505, Short.MAX_VALUE)
                    .addGap(18)
                    .addGroup(groupLayout.createParallelGroup(Alignment.LEADING)
                        .addComponent(btnAddressBook, GroupLayout.PREFERRED_SIZE, 121, GroupLayout.PREFERRED_SIZE)
                        .addComponent(btnDeleteAddress)
                        .addComponent(buttonNewAccount)
                        .addComponent(btnCopyAddress)
                        .addComponent(qr)))
        );
        groupLayout.setVerticalGroup(
            groupLayout.createParallelGroup(Alignment.LEADING)
                .addGroup(groupLayout.createSequentialGroup()
                    .addComponent(qr)
                    .addGap(18)
                    .addComponent(btnCopyAddress)
                    .addGap(18)
                    .addComponent(buttonNewAccount)
                    .addGap(18)
                    .addComponent(btnDeleteAddress)
                    .addGap(18)
                    .addComponent(btnAddressBook)
                    .addContainerGap(249, Short.MAX_VALUE))
                .addComponent(scrollPane, GroupLayout.DEFAULT_SIZE, 537, Short.MAX_VALUE)
        );
        groupLayout.linkSize(SwingConstants.HORIZONTAL, btnCopyAddress, btnAddressBook, buttonNewAccount, btnDeleteAddress);
        setLayout(groupLayout);
        // @formatter:on

        refresh();
    }

    /**
     * Table model backed by the current list of wallet accounts.
     */
    private static class ReceiveTableModel extends AbstractTableModel {

        private static final long serialVersionUID = 1L;

        private transient List<WalletAccount> data;

        public ReceiveTableModel() {
            this.data = Collections.emptyList();
        }

        public void setData(List<WalletAccount> data) {
            this.data = data;
            this.fireTableDataChanged();
        }

        /**
         * Returns the account at the given model row, or null when the
         * index is out of range.
         */
        public WalletAccount getRow(int row) {
            if (row >= 0 && row < data.size()) {
                return data.get(row);
            }
            return null;
        }

        @Override
        public int getRowCount() {
            return data.size();
        }

        @Override
        public int getColumnCount() {
            return columnNames.length;
        }

        @Override
        public String getColumnName(int column) {
            return columnNames[column];
        }

        @Override
        public Object getValueAt(int row, int column) {
            WalletAccount acc = data.get(row);
            switch (column) {
            case 0:
                return SwingUtil.formatNumber(row);
            case 1:
                return acc.getName().orElse("");
            case 2:
                // Address shown with the 0x hex prefix.
                return Hex.PREF + acc.getKey().toAddressString();
            case 3:
                return SwingUtil.formatAmount(acc.getAvailable());
            case 4:
                return SwingUtil.formatAmount(acc.getLocked());
            default:
                return null;
            }
        }
    }

    /**
     * Central event dispatcher for model refreshes and button clicks.
     */
    @Override
    public synchronized void actionPerformed(ActionEvent e) {
        Action action = Action.valueOf(e.getActionCommand());
        switch (action) {
        case REFRESH:
            refresh();
            break;
        case SELECT_ACCOUNT:
            selectAccount();
            break;
        case COPY_ADDRESS:
            copyAddress();
            break;
        case NEW_ACCOUNT:
            newAccount();
            break;
        case DELETE_ACCOUNT:
            deleteAccount();
            break;
        case SHOW_ADDRESS_BOOK:
            showAddressBook();
            break;
        default:
            throw new UnreachableException();
        }
    }

    /**
     * Processes the REFRESH event: reloads the table data while trying to
     * keep the previously selected account selected.
     */
    protected void refresh() {
        List<WalletAccount> accounts = model.getAccounts();

        /*
         * update table model
         */
        WalletAccount acc = getSelectedAccount();
        tableModel.setData(accounts);

        if (acc != null) {
            // Re-select the same account by address after the reload.
            for (int i = 0; i < accounts.size(); i++) {
                if (Arrays.equals(accounts.get(i).getKey().toAddress(), acc.getKey().toAddress())) {
                    table.setRowSelectionInterval(table.convertRowIndexToView(i), table.convertRowIndexToView(i));
                    break;
                }
            }
        } else if (!accounts.isEmpty()) {
            table.setRowSelectionInterval(0, 0);
        }
        selectAccount();
    }

    /**
     * Processes the SELECT_ACCOUNT event: renders the QR code for the
     * selected address, or an empty image when nothing is selected.
     */
    protected void selectAccount() {
        try {
            WalletAccount acc = getSelectedAccount();
            if (acc != null) {
                BufferedImage bi = SwingUtil.createQrImage("semux://" + Hex.PREF + acc.getKey().toAddressString(),
                        QR_SIZE, QR_SIZE);
                qr.setIcon(new ImageIcon(bi));
            } else {
                qr.setIcon(SwingUtil.emptyImage(QR_SIZE, QR_SIZE));
            }
        } catch (WriterException exception) {
            logger.error("Unable to generate QR code", exception);
        }
    }

    /**
     * Processes the COPY_ADDRESS event: puts the selected address on the
     * system clipboard.
     */
    protected void copyAddress() {
        WalletAccount acc = getSelectedAccount();
        if (acc == null) {
            JOptionPane.showMessageDialog(this, GuiMessages.get("SelectAccount"));
        } else {
            String address = Hex.PREF + acc.getKey().toAddressString();
            StringSelection stringSelection = new StringSelection(address);
            Clipboard clipboard = Toolkit.getDefaultToolkit().getSystemClipboard();
            clipboard.setContents(stringSelection, null);

            JOptionPane.showMessageDialog(this, GuiMessages.get("AddressCopied", address));
        }
    }

    /**
     * Processes the SHOW_ADDRESS_BOOK event: opens the address book
     * dialog.  (Original comment said RENAME_ACCOUNT, which was wrong.)
     */
    protected void showAddressBook() {
        gui.getAddressBookDialog().setVisible(true);
    }

    /**
     * Processes the NEW_ACCOUNT event: adds a fresh key to the wallet and
     * persists it; on a failed flush the key is removed again.
     */
    protected void newAccount() {
        Key key = new Key();

        Wallet wallet = kernel.getWallet();
        wallet.addAccount(key);
        boolean added = wallet.flush();

        if (added) {
            gui.updateModel();

            JOptionPane.showMessageDialog(this, GuiMessages.get("NewAccountCreated"));
        } else {
            // Roll back so the in-memory wallet matches what was saved.
            wallet.removeAccount(key);
            JOptionPane.showMessageDialog(this, GuiMessages.get("WalletSaveFailed"));
        }
    }

    /**
     * Processes the DELETE_ACCOUNT event: asks for confirmation, then
     * removes the selected account and persists the wallet.
     */
    protected void deleteAccount() {
        WalletAccount acc = getSelectedAccount();
        if (acc == null) {
            JOptionPane.showMessageDialog(this, GuiMessages.get("SelectAccount"));
        } else {
            int ret = JOptionPane
                    .showConfirmDialog(this, GuiMessages.get("ConfirmDeleteAccount"), GuiMessages.get("DeleteAccount"),
                            JOptionPane.YES_NO_OPTION);
            // YES_OPTION and OK_OPTION share the value 0, so this matches
            // the "Yes" answer of the YES_NO dialog.
            if (ret == JOptionPane.OK_OPTION) {
                Wallet wallet = kernel.getWallet();
                wallet.removeAccount(acc.getKey());
                wallet.flush();

                gui.updateModel();

                JOptionPane.showMessageDialog(this, GuiMessages.get("AccountDeleted"));
            }
        }
    }

    /**
     * Returns the selected account.
     *
     * @return the account behind the selected view row, or null when no
     *         row is selected
     */
    protected WalletAccount getSelectedAccount() {
        int row = table.getSelectedRow();
        return (row != -1) ? tableModel.getRow(table.convertRowIndexToModel(row)) : null;
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.util;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import org.apache.commons.lang3.tuple.Pair;
import org.junit.Test;
/**
 * Round-trip and failure tests for the BasicAuth helper.
 */
public class BasicAuthTest {

    @Test
    public void testAuth() {
        // generateAuth -> parseAuth must round-trip both credentials.
        String username = "name";
        String password = "<PASSWORD>";
        String auth = BasicAuth.generateAuth(username, password);
        Pair<String, String> p = BasicAuth.parseAuth(auth);

        assertEquals(username, p.getKey());
        assertEquals(password, p.getValue());
    }

    @Test
    public void testInvalid() {
        // A malformed header must yield null rather than throw.
        assertNull(BasicAuth.parseAuth("invalid_auth_string"));
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.api;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import org.junit.Test;
/**
 * Tests name-to-enum resolution in the API Command enum.
 */
public class CommandTest {

    @Test
    public void testParseCommand() {
        // Unknown names map to null; known names map to their enum value.
        assertNull(Command.of("not_exists"));
        assertEquals(Command.ADD_NODE, Command.of("add_node"));
    }
}
<file_sep>/**
* Copyright (c) 2017-2018 The Semux Developers
*
* Distributed under the MIT software license, see the accompanying file
* LICENSE or https://opensource.org/licenses/mit-license.php
*/
package org.semux.gui;
import org.assertj.swing.edt.GuiActionRunner;
import org.assertj.swing.fixture.FrameFixture;
import org.assertj.swing.junit.testcase.AssertJSwingJUnitTestCase;
import org.junit.Test;
import org.semux.message.GuiMessages;
/**
 * Verifies the splash screen's reaction to lifecycle events: it shows the
 * matching progress text while loading, and hides itself as soon as the
 * wallet-selection dialog or the main frame takes over.
 */
public class SplashScreenTest extends AssertJSwingJUnitTestCase {

    @Test
    public void testEvents() {
        SplashScreenTestApplication application = GuiActionRunner.execute(SplashScreenTestApplication::new);

        FrameFixture window = new FrameFixture(robot(), application.splashScreen);
        window.requireVisible().progressBar().requireVisible().requireText(GuiMessages.get("SplashLoading"));

        application.walletModel.fireSemuxEvent(SemuxEvent.WALLET_LOADING);
        window.requireVisible().progressBar().requireVisible().requireText(GuiMessages.get("SplashLoadingWallet"));

        application.walletModel.fireSemuxEvent(SemuxEvent.GUI_WALLET_SELECTION_DIALOG_SHOWN);
        window.requireNotVisible();

        application.walletModel.fireSemuxEvent(SemuxEvent.KERNEL_STARTING);
        window.requireVisible().progressBar().requireVisible().requireText(GuiMessages.get("SplashStartingKernel"));

        // the splash screen should be disposed as soon as the mainframe starts
        application.walletModel.fireSemuxEvent(SemuxEvent.GUI_MAINFRAME_STARTED);
        window.requireNotVisible();
    }

    @Override
    protected void onSetUp() {
        // No extra fixtures needed; the test builds its own application.
    }
}
| 99f805ae0280a03c0d38d38ceef88034d31d44cf | [
"Markdown",
"Maven POM",
"INI",
"Java",
"Dockerfile",
"Shell"
] | 132 | Java | cryptokat/semux | 3275a52329bc35ceefb4020ebc742f7a6bccbf8e | ba46f0066e060c6d38b8829ec48f86c3e3859a7d |
refs/heads/master | <repo_name>kvineet/leetcode-solutions<file_sep>/README.md
# leetcode-solutions
A repository of my LeetCode solutions.
<file_sep>/0002_add-two-numbers.go
package main
// ListNode is a node of a singly linked list.  In this file each node
// holds one decimal digit of a number, least significant digit first
// (LeetCode problem 2's representation).
type ListNode struct {
	Val  int
	Next *ListNode
}
// addTwoNumbers returns the sum of the two numbers represented by l1 and
// l2 (digit lists, least significant digit first) as a new digit list.
// It simply starts the carry-propagating helper with a carry of zero.
func addTwoNumbers(l1 *ListNode, l2 *ListNode) *ListNode {
	return addTwoNumbersHelper(l1, l2, 0)
}
// addTwoNumbersHelper adds the digit lists l1 and l2 together with an
// initial carry and returns a freshly allocated digit list (least
// significant digit first).  A non-zero carry left over after both lists
// are exhausted produces one extra node, exactly as the recursive
// original did.
func addTwoNumbersHelper(l1 *ListNode, l2 *ListNode, carry int) *ListNode {
	dummy := &ListNode{}
	tail := dummy
	for l1 != nil || l2 != nil || carry > 0 {
		sum := carry
		if l1 != nil {
			sum += l1.Val
			l1 = l1.Next
		}
		if l2 != nil {
			sum += l2.Val
			l2 = l2.Next
		}
		carry = sum / 10
		tail.Next = &ListNode{Val: sum % 10}
		tail = tail.Next
	}
	return dummy.Next
}
<file_sep>/0001_two-sum.go
package main
// twoSum returns the indices of the two entries of nums that add up to
// target, or {-1, -1} when no such pair exists (LeetCode problem 1).
// A value->index map of the entries seen so far gives a single O(n) pass.
func twoSum(nums []int, target int) []int {
	seen := make(map[int]int)
	for idx, num := range nums {
		if j, ok := seen[target-num]; ok {
			return []int{j, idx}
		}
		seen[num] = idx
	}
	return []int{-1, -1}
}
| 4e2f1c93f699d50c26e0b21db7d39c7ac4e1dd32 | [
"Markdown",
"Go"
] | 3 | Markdown | kvineet/leetcode-solutions | ba5f179e6ae16f60c7298e1b544c72f99ae9ec98 | 448093865ebf6ba60d2f62f44f18dfc79b7cf6c9 |
refs/heads/master | <file_sep>$(document).ready(function () {
$('button').click(function () {
$(this).parent().remove();
let id = $(this).data('id')
$.ajax({
method: 'DELETE',
url: 'delete/'+ id ,
beforeSend: function(xhr){
xhr.setRequestHeader('x-CSRFToken',csrf_token)
},
})
})
$('html').niceScroll({
cursorcolor: '#067e7e',
cursorborder: 'none',
scrollspeed: '100'
});
})<file_sep>from django.urls import path
from . import views
from django.contrib.auth import views as auth_views
# Route table for the todolist app.  Login/logout use Django's built-in
# auth class-based views; everything else maps to functions in views.py.
urlpatterns = [
    path('', views.index, name='index'),
    path('login/', auth_views.LoginView.as_view(), name='login'),
    path('logout/', auth_views.LogoutView.as_view(), name='logout'),
    path('signup/', views.signup, name='signup'),
    path('details/<pk>', views.details, name='details'),
    path('add/', views.add, name='add'),
    path('delete/<pk>', views.delete, name='delete'),
    path('profile/', views.profile, name='profile'),
    path('change_pass/', views.change_pass, name='change_pass'),
    path('edit_todo/<pk>', views.edit_todo, name='edit_todo'),
    path('404/', views.errorpage, name='errorpage'),
]
<file_sep># Generated by Django 2.1.1 on 2018-09-10 12:30
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('todolist', '0002_todo_auth'),
]
operations = [
migrations.AddField(
model_name='todo',
name='image',
field=models.ImageField(blank=True, upload_to='post_image'),
),
]
<file_sep>from django.db import models
from datetime import datetime
from django.contrib.auth.models import User
import os
# Create your models here.
class Todo(models.Model):
    """A user's post: title, body text, optional image and creation time."""

    # Post owner; deleting the user cascades to their posts.
    # NOTE(review): default=1 assumes a user with pk=1 exists -- confirm.
    auth = models.ForeignKey(User, on_delete=models.CASCADE,default=1)
    title = models.CharField(max_length=120)
    text = models.TextField()
    # Optional upload, stored under MEDIA_ROOT/post_image.
    image = models.ImageField(upload_to='post_image', blank=True)
    # NOTE(review): datetime.now is timezone-naive; with USE_TZ enabled
    # Django expects django.utils.timezone.now -- confirm settings.
    created_at = models.DateTimeField(default=datetime.now, blank=True)
<file_sep># DodBook App
A posts-based social app, similar to Facebook and Twitter.
## description
This app provides CRUD operations on posts and profile information in the database, plus authorization and authentication.
## Some screenshots of the app
Login Page

SignUp Page

SignUp with invalid input

Home page after signing in

Profile Page

ChangePassword Page

Error Page Not Found

<file_sep>from django.shortcuts import render, redirect
from .models import Todo
from django.contrib.auth.models import User
from django.contrib.auth import authenticate, login, update_session_auth_hash
from django.contrib.auth.forms import UserCreationForm, PasswordChangeForm
from .forms import ProfileForm
from django.contrib.auth.decorators import login_required
# Create your views here.
from django.core.files.storage import FileSystemStorage
@login_required
def index(request):
    """Home page: list every post, newest first."""
    todos = Todo.objects.all()
    return render(request, 'todolist/index.html', {'todolist': reversed(todos)})
@login_required
def details(request, pk):
    """Detail page for a single post.

    Returns a 404 response for an unknown ``pk`` instead of crashing with
    ``Todo.DoesNotExist`` (a 500) as the original ``objects.get`` did.
    """
    todo = get_object_or_404(Todo, id=pk)
    return render(request, 'todolist/details.html', {'details': todo})
@login_required
def add(request):
    """Create a new post owned by the logged-in user, then go home.

    GET simply shows the empty form; the image is optional.
    """
    if request.method != 'POST':
        return render(request, 'todolist/addtodo.html')
    Todo(
        title=request.POST['title'],
        text=request.POST['text'],
        auth=request.user,
        image=request.FILES.get('image'),
    ).save()
    return redirect('index')
@login_required
def delete(request, pk):
    """AJAX endpoint: delete post ``pk`` (invoked with HTTP DELETE).

    ``filter(...).delete()`` is a no-op for an unknown id instead of the
    500 the original ``objects.get(...)`` raised.
    """
    if request.method == 'DELETE':
        # NOTE(review): any logged-in user may delete any post here,
        # unlike edit_todo which checks authorship -- confirm intent.
        Todo.objects.filter(id=pk).delete()
    return redirect('index')
def signup(request):
    """Register a new user, log them in, and redirect to the home page."""
    if request.method == 'POST':
        form = UserCreationForm(request.POST)
        if form.is_valid():
            form.save()
            username = form.cleaned_data['username']
            # UserCreationForm stores the raw password under 'password1';
            # the key used previously did not exist in cleaned_data.
            password = form.cleaned_data['password1']
            user = authenticate(username=username, password=password)
            login(request, user)
            return redirect('index')
        # Re-render with the bound form so field errors are displayed.
        return render(request, 'registration/signup.html', {'form': form, 'error': True})
    form = UserCreationForm()
    return render(request, 'registration/signup.html', {'form': form})
@login_required
def profile(request):
    """Show the logged-in user's profile and handle profile edits."""
    user_record = User.objects.get(username=request.user)
    if request.method == 'POST':
        form = ProfileForm(request.POST, instance=request.user)
        if form.is_valid():
            form.save()
            return redirect('profile')
        # Invalid submission: show the bound form with an error flag.
        context = {'form': form, 'field': user_record, 'error': True}
    else:
        context = {'form': ProfileForm(instance=request.user), 'field': user_record}
    return render(request, 'registration/profile.html', context)
@login_required
def change_pass(request):
    """Let the logged-in user change their password."""
    if request.method == 'POST':
        form = PasswordChangeForm(data=request.POST, user=request.user)
        if form.is_valid():
            form.save()
            # Keep the current session logged in after the password change.
            update_session_auth_hash(request, form.user)
            return redirect('profile')
        context = {'form': form, 'error': True}
    else:
        context = {'form': PasswordChangeForm(user=request.user)}
    return render(request, 'registration/chang_pass.html', context)
@login_required
def edit_todo(request, pk):
    """Let a post's author edit it; anyone else gets the 404 page.

    Improvements over the original: unknown ``pk`` now yields a 404
    instead of an unhandled ``DoesNotExist`` (500), and the redundant
    ``obj.image = None`` pre-assignment before replacing the image was
    dropped (assigning the upload directly replaces the old file).
    """
    obj = get_object_or_404(Todo, id=pk)
    if request.user != obj.auth:
        return render(request, 'todolist/404.html')
    if request.method == 'POST':
        obj.title = request.POST['title']
        obj.text = request.POST['text']
        new_image = request.FILES.get('image')
        if new_image:
            obj.image = new_image
        obj.save()
        return redirect('index')
    return render(request, 'todolist/edit_todo.html', {'field': obj})
def errorpage(request):
    """Render the custom "page not found" template."""
    return render(request, 'todolist/404.html')
"JavaScript",
"Python",
"Markdown"
] | 6 | JavaScript | MahmoudAdel1996/DodBook | 579df7bbdbb0b744cb87359f376fe0e66fe2dffa | 9ff51cdc3f5a37b8c87281c392b1e980a9339a75 |
refs/heads/master | <file_sep>// FILE: C:/Users/Lenovo/Desktop/UML//C.cs
using System.Collections;
// In this section you can add your own using directives
// section -64--88-56-1-29c7786a:16deec23af4:-8000:0000000000000BF5 begin
// section -64--88-56-1-29c7786a:16deec23af4:-8000:0000000000000BF5 end
/// <summary>
/// A class that represents ...
///
/// @see OtherClasses
/// @author your_name_here
/// </summary>
/// <summary>
/// Association-end holder generated by ArgoUML from the UML model:
/// one reference to A plus collections of B and unnamed links.
/// The generator emitted a stray "@element-type A" line followed by a
/// bare "*/" outside any comment, which did not compile; it is rewrapped
/// as a proper doc comment here.
/// </summary>
public class C
{
    // Associations

    /// <summary>
    /// Navigable association end towards A.
    /// @element-type A
    /// </summary>
    public A A_C;

    /// <summary>
    /// Association ends towards B.
    /// </summary>
    public ArrayList B_C;

    /// <summary>
    /// Unnamed association ends from the model.
    /// </summary>
    public ArrayList my;
} /* end class C */
<file_sep>#ifndef BD_h
#define BD_h
#include "volume.h"
class livre;
// BD ("bande dessinée" -- comic book) specializes volume with the name
// of its artist and a link to the related livre (book).
class BD : public volume {
public:
    // NOTE(review): `String` is not a standard C++ type -- presumably a
    // project typedef or the UML generator meant std::string; confirm.
    String dessinateur;  // artist (dessinateur) name
public:
    /**
     * Association end towards the related book.
     * @element-type livre
     */
    livre *mylivre;
};
#endif // BD_h
<file_sep>// FILE: C:/Users/Lenovo/Desktop/GL tp/TP5//Document.cs
using System.Collections;
// In this section you can add your own using directives
// section -64--88-56-1-49f21be0:16e1d144e90:-8000:00000000000014C7 begin
// section -64--88-56-1-49f21be0:16e1d144e90:-8000:00000000000014C7 end
/// <summary>
/// A class that represents ...
///
/// @see OtherClasses
/// @author your_name_here
/// </summary>
/// <summary>
/// A document held by a library (bibliothèque), identified by its title.
/// The ArgoUML generator emitted three clashing members all named
/// "mybibliothèque" plus an unbalanced "*/", so the class could not
/// compile; the association is collapsed to the single typed end here.
/// </summary>
public class Document
{
    // Attributes
    public string titre;

    // Associations

    /// <summary>
    /// Owning library.
    /// @element-type bibliothèque
    /// </summary>
    public bibliothèque mybibliothèque;
} /* end class Document */
<file_sep>#include "banque.h"
#include"compte.h"
#include<iostream>
using namespace std;
// Construct an empty bank able to hold up to `nl` accounts.
// NOTE(review): main() default-constructs banque, so banque.h presumably
// declares a default value for `nl` -- confirm.
banque::banque(int nl)
{
    nbrelimite=nl;
    agence=new compte[nbrelimite];  // fixed-size account storage
    nbreel=0;                       // number of accounts actually stored
}
// Release the owned account storage.
banque::~banque()
{
    delete []agence;
}
// Append a copy of `c` to the bank.
// The original wrote past the end of `agence` once `nbreel` reached
// `nbrelimite`; a full bank now silently ignores further additions
// (the void interface offers no way to report the failure).
void banque::add(compte c)
{
    if (nbreel < nbrelimite) {
        agence[nbreel] = c;
        nbreel++;
    }
}
// Remove the account whose number is `numc`, if present.
// The original second loop only decremented `nbreel` repeatedly without
// moving any element (discarding roughly half the remaining accounts),
// and it shrank the bank even when the number was not found.  This
// version shifts the tail left by one slot and decrements exactly once.
void banque::deletee(int numc)
{
    int i, j;
    for (i = 0; i < nbreel; i++) {
        if (agence[i].numerocompte() == numc) {
            break;
        }
    }
    if (i == nbreel) {
        return;  // no such account: nothing to remove
    }
    for (j = i + 1; j < nbreel; j++) {
        agence[j - 1] = agence[j];
    }
    nbreel--;
}
// Return a copy of the account with number `numc`.
// The original loop never incremented `i` ("for(i=0;i<nbreel;i)") and so
// spun forever unless the very first account matched; the increment is
// restored here.
// NOTE(review): when no account matches, this still returns
// agence[nbreel] (one past the stored accounts) exactly as the original
// intended to -- the interface has no way to signal "not found"; confirm.
compte banque::getcompte(int numc)
{
    int i;
    for (i = 0; i < nbreel; i++) {
        if (agence[i].numerocompte() == numc) {
            break;
        }
    }
    return agence[i];
}
// Print every stored account, one numbered entry per account.
// The original called numerocompte() and discarded the result, so no
// account data was actually printed between the header and separator;
// the account number is now written to the stream.
void banque::affichage()
{
    int i;
    for (i = 0; i < nbreel; i++) {
        cout << "compte" << i + 1 << endl;
        cout << agence[i].numerocompte() << endl;
        cout << "***********************" << endl;
    }
}
<file_sep>#ifndef B_h
#define B_h
class C;
class A;
// Association holder generated from the UML model: B links to one C and
// one A.
class B {
public:
    C *B_C;  // association end towards C
    A *A_B;  // association end towards A
};
#endif // B_h
<file_sep>#ifndef bibliothèque_h
#define bibliothèque_h
#include <vector>
class adhérent;
class Document;

/**
 * Library: manages its registered members (adhérent) and its documents.
 *
 * The ArgoUML generator emitted an empty forward declaration ("class ;"),
 * an untyped member ("*my;") and several members with duplicate names,
 * none of which compiled; they are collapsed here to one association end
 * per model relation.  The operation interface is unchanged.
 */
class bibliothèque {
public:
    virtual void ajouterAdherent();
    virtual void enleverAdherent();
    virtual void ajouterDocument();

public:
    /**
     * Registered members of the library.
     * @element-type adhérent
     */
    std::vector< adhérent* > myadhérent;

    /**
     * Documents owned by the library.
     * @element-type Document
     */
    std::vector< Document* > myDocument;
};
#endif // bibliothèque_h
<file_sep>#include <iostream>
#include"compte.h"
#include"banque.h"
using namespace std;
// Smoke-test driver exercising the compte and banque classes.
int main(int argc, char** argv) {
    compte c;                  // default-constructed account
    compte d("khawla",1,1000); // NOTE(review): passing a string literal to a
                               // char* parameter is deprecated in C++11+
    float x=10, y=33;
    c.consulter();             // print account details
    c.retrait(x);              // withdraw x
    c.depot(y);                // deposit y
    c.numerocompte();          // result deliberately discarded
    banque b;                  // NOTE(review): requires a default (or
                               // defaulted-parameter) ctor in banque.h -- confirm
    //banque q(20);
    b.add(c);
    b.deletee(1);
    b.getcompte(2);
    b.affichage();
    return 0;
}
<file_sep>#include "Dicionnaire.h"
<file_sep>#include "Document.h"
<file_sep>#include "compte.h"
#include<iostream>
#include<string.h>
using namespace std;
// Default account: owner "khawla", number 0, empty balance.
compte::compte()
{
    nom=new char[10];       // 10 bytes comfortably holds "khawla" + '\0'
    strcpy(nom,"khawla");
    num=0;
    solde=0.0;
}
// Account with explicit owner name, number and starting balance.
// The original allocated strlen(no) bytes -- one short of the space the
// terminating '\0' needs -- so strcpy wrote past the buffer.
compte::compte(char *no,int nu,float so)
{
    nom = new char[strlen(no) + 1];
    strcpy(nom, no);
    num = nu;
    solde = so;
}
// Deep-copying copy constructor: each account owns its own name buffer.
// Fixes the same off-by-one as the value constructor: strlen(...)+1
// bytes are required for the terminating '\0'.
compte::compte(const compte&obj)
{
    num = obj.num;
    solde = obj.solde;
    nom = new char[strlen(obj.nom) + 1];
    strcpy(nom, obj.nom);
}
// Release the owned name buffer.
compte::~compte()
{
    delete[]nom;
}
// Deep-copying assignment operator.
// Fixes two defects of the original: the replacement buffer was one byte
// too small for the '\0' terminator, and self-assignment freed `nom`
// before copying from it (use-after-free).
compte& compte::operator=(const compte &obj)
{
    if (this != &obj) {
        num = obj.num;
        solde = obj.solde;
        delete[] nom;
        nom = new char[strlen(obj.nom) + 1];
        strcpy(nom, obj.nom);
    }
    return *this;
}
// Print the account details.
// The original labels ran straight into the values ("le nom du
// clientkhawla"); a " : " separator makes the output readable.
void compte::consulter()
{
    cout << "le nom du client : " << nom << endl;
    cout << "le numero de compte est : " << num << endl;
    cout << "le solde est : " << solde << endl;
}
// Withdraw (retrait) `x` from the account.
// The original *added* on withdrawal -- the operation was swapped with
// depot; a withdrawal must decrease the balance.
void compte::retrait(float x)
{
    solde -= x;
}
// Deposit (depot) `y` into the account.
// The original *subtracted* on deposit -- the operation was swapped with
// retrait; a deposit must increase the balance.
void compte::depot(float y)
{
    solde += y;
}
// Accessor for the account number.
int compte::numerocompte()
{
    return num;
}
<file_sep>#ifndef adhérent_h
#define adhérent_h
#include <vector>
class bibliothèque;
class livre;
class emprute;

/**
 * Library member who can borrow (emprunter) and return (rendre) books.
 *
 * The ArgoUML generator duplicated nearly every association member
 * (mybibliothèque, mylivre and myemprute each appeared several times),
 * which did not compile; they are collapsed here to one member per model
 * relation.  The operation interface is unchanged.
 *
 * NOTE(review): `String` is not a standard C++ type -- presumably a
 * project typedef or std::string was intended; confirm.
 */
class adhérent {
public:
    virtual void emprunterLivre();
    virtual void rendreLivre();

public:
    String prénom;
    String nom;

public:
    /**
     * Library the member is registered with.
     * @element-type bibliothèque
     */
    bibliothèque *mybibliothèque;

    /**
     * Books currently borrowed by this member.
     * @element-type livre
     */
    std::vector< livre* > mylivre;

    /**
     * Loan record linking this member to borrowed books.
     * @element-type emprute
     */
    emprute *myemprute;
};
#endif // adhérent_h
<file_sep><?php
error_reporting(E_ALL);
/**
* untitledModel - class.adhérent.php
*
* $Id$
*
* This file is part of untitledModel.
*
* Automatically generated on 17.12.2019, 11:12:05 with ArgoUML PHP module
* (last revised $Date: 2010-01-12 20:14:42 +0100 (Tue, 12 Jan 2010) $)
*
* @author firstname and lastname of author, <<EMAIL>>
*/
if (0 > version_compare(PHP_VERSION, '5')) {
die('This file was generated for PHP 5');
}
/**
* include bibliothèque
*
* @author firstname and lastname of author, <<EMAIL>>
*/
require_once('class.bibliothèque.php');
/**
* include emprute
*
* @author firstname and lastname of author, <<EMAIL>>
*/
require_once('class.emprute.php');
/**
* include livre
*
* @author firstname and lastname of author, <<EMAIL>>
*/
require_once('class.livre.php');
/* user defined includes */
// section -64--88-56-1-49f21be0:16e1d144e90:-8000:00000000000014B2-includes begin
// section -64--88-56-1-49f21be0:16e1d144e90:-8000:00000000000014B2-includes end
/* user defined constants */
// section -64--88-56-1-49f21be0:16e1d144e90:-8000:00000000000014B2-constants begin
// section -64--88-56-1-49f21be0:16e1d144e90:-8000:00000000000014B2-constants end
/**
 * Library member (adhérent): can borrow (emprunter) and return (rendre)
 * books.
 *
 * Generated by ArgoUML; the "// section ... begin/end" markers let the
 * generator preserve hand-written method bodies across regeneration, so
 * they must stay in place.
 *
 * @access public
 * @author firstname and lastname of author, <<EMAIL>>
 */
class adhérent
{
    // --- ASSOCIATIONS ---
    // generateAssociationEnd : // generateAssociationEnd : // generateAssociationEnd : // generateAssociationEnd : // generateAssociationEnd : // generateAssociationEnd : // generateAssociationEnd : // generateAssociationEnd :

    // --- ATTRIBUTES ---

    /**
     * First name of the member.
     *
     * @access public
     * @var String
     */
    public $prénom = null;

    /**
     * Last name of the member.
     *
     * @access public
     * @var String
     */
    public $nom = null;

    // --- OPERATIONS ---

    /**
     * Borrow a book from the library (body to be written between the
     * section markers below).
     *
     * @access public
     * @author firstname and lastname of author, <<EMAIL>>
     * @return mixed
     */
    public function emprunterLivre()
    {
        // section -64--88-56-1-49f21be0:16e1d144e90:-8000:000000000000151A begin
        // section -64--88-56-1-49f21be0:16e1d144e90:-8000:000000000000151A end
    }

    /**
     * Return a previously borrowed book (body to be written between the
     * section markers below).
     *
     * @access public
     * @author firstname and lastname of author, <<EMAIL>>
     * @return mixed
     */
    public function rendreLivre()
    {
        // section -64--88-56-1-49f21be0:16e1d144e90:-8000:000000000000151C begin
        // section -64--88-56-1-49f21be0:16e1d144e90:-8000:000000000000151C end
    }
} /* end of class adhérent */
?><file_sep>#include "bibliothèque.h"
// Stub: registers a new member with the library (generated; fill in between the markers).
void bibliothèque::ajouterAdherent()
// don't delete the following line as it's needed to preserve source code of this autogenerated element
// section -64--88-56-1-49f21be0:16e1d144e90:-8000:00000000000014C1 begin
{
}
// section -64--88-56-1-49f21be0:16e1d144e90:-8000:00000000000014C1 end
// don't delete the previous line as it's needed to preserve source code of this autogenerated element
// Stub: removes a member from the library.
void bibliothèque::enleverAdherent()
// don't delete the following line as it's needed to preserve source code of this autogenerated element
// section -64--88-56-1-49f21be0:16e1d144e90:-8000:00000000000014C3 begin
{
}
// section -64--88-56-1-49f21be0:16e1d144e90:-8000:00000000000014C3 end
// don't delete the previous line as it's needed to preserve source code of this autogenerated element
// Stub: adds a document to the library's collection.
void bibliothèque::ajouterDocument()
// don't delete the following line as it's needed to preserve source code of this autogenerated element
// section -64--88-56-1-49f21be0:16e1d144e90:-8000:00000000000014C5 begin
{
}
// section -64--88-56-1-49f21be0:16e1d144e90:-8000:00000000000014C5 end
// don't delete the previous line as it's needed to preserve source code of this autogenerated element
<file_sep>#include "A.h"
// Stub: computes the shape's perimeter (generated; fill in between the markers).
void A::perimetre()
// don't delete the following line as it's needed to preserve source code of this autogenerated element
// section -64--88-56-1-29c7786a:16deec23af4:-8000:0000000000000BEF begin
{
}
// section -64--88-56-1-29c7786a:16deec23af4:-8000:0000000000000BEF end
// don't delete the previous line as it's needed to preserve source code of this autogenerated element
// Stub: computes the shape's area.
void A::aire()
// don't delete the following line as it's needed to preserve source code of this autogenerated element
// section -64--88-56-1-29c7786a:16deec23af4:-8000:0000000000000C0B begin
{
}
// section -64--88-56-1-29c7786a:16deec23af4:-8000:0000000000000C0B end
// don't delete the previous line as it's needed to preserve source code of this autogenerated element
<file_sep>// Doxygen-generated search index entry for class "banque" (do not edit by hand).
var searchData=
[
['banque_0',['banque',['../classbanque.html',1,'']]]
];
<file_sep>#include "emprute.h"
// Stub: extends the loan's return date (name sic: "pronlonger" for "prolonger";
// cannot be renamed without breaking the generated header).
void emprute::pronlongerDateRetour()
// don't delete the following line as it's needed to preserve source code of this autogenerated element
// section -64--88-56-1-49f21be0:16e1d144e90:-8000:00000000000014DE begin
{
}
// section -64--88-56-1-49f21be0:16e1d144e90:-8000:00000000000014DE end
// don't delete the previous line as it's needed to preserve source code of this autogenerated element
<file_sep>// FILE: C:/Users/Lenovo/Desktop/UML//A.cs
// In this section you can add your own using directives
// section -64--88-56-1-29c7786a:16deec23af4:-8000:0000000000000BE2 begin
// section -64--88-56-1-29c7786a:16deec23af4:-8000:0000000000000BE2 end
/// <summary>
/// Circle-like shape generated from the UML model: radius, center and a key,
/// plus association ends to B and C.
/// NOTE(review): the generator emitted the UML types "Integer"/"String",
/// which do not exist in C#; they are mapped to the built-ins int/string
/// here so the file compiles. ArrayList is fully qualified because this
/// file has no using directives.
/// </summary>
public class A
{
    // Attributes
    public int R;
    public int Center;
    public string cle;
    // Associations
    /// <summary>Association end towards C.</summary>
    public System.Collections.ArrayList A_C;
    /// <summary>Association end towards B.</summary>
    public System.Collections.ArrayList A_B;
    // Operations
    /// <summary>
    /// Computes the perimeter (stub; developer code is preserved between
    /// the section markers on regeneration).
    /// </summary>
    public void perimetre()
    {
        // section -64--88-56-1-29c7786a:16deec23af4:-8000:0000000000000BEF begin
        // section -64--88-56-1-29c7786a:16deec23af4:-8000:0000000000000BEF end
    }
    /// <summary>
    /// Computes the area (stub; body preserved between the section markers).
    /// </summary>
    public void aire()
    {
        // section -64--88-56-1-29c7786a:16deec23af4:-8000:0000000000000C0B begin
        // section -64--88-56-1-29c7786a:16deec23af4:-8000:0000000000000C0B end
    }
} /* end class A */
<file_sep>// FILE: C:/Users/Lenovo/Desktop/GL tp/TP5//Dicionnaire.cs
// In this section you can add your own using directives
// section -64--88-56-1-49f21be0:16e1d144e90:-8000:00000000000014E0 begin
// section -64--88-56-1-49f21be0:16e1d144e90:-8000:00000000000014E0 end
/// <summary>
/// A dictionary: a volume subclass with no members of its own
/// (marker class generated from the UML model).
/// </summary>
public class Dicionnaire : volume
{} /* end class Dicionnaire */
<file_sep>// FILE: C:/Users/Lenovo/Desktop/GL tp/TP5//adhérent.cs
using System.Collections;
// In this section you can add your own using directives
// section -64--88-56-1-49f21be0:16e1d144e90:-8000:00000000000014B2 begin
// section -64--88-56-1-49f21be0:16e1d144e90:-8000:00000000000014B2 end
/// <summary>
/// Library member ("adhérent"): a registered reader who can borrow and
/// return books.
/// NOTE(review): the generator emitted each association end several times
/// (duplicate field names do not compile in C#) and left two dangling
/// "@element-type ... */" comment fragments with no opening "/**"; one
/// field per association is kept and the broken comments are repaired.
/// "String" is mapped to the built-in string (no "using System;" here).
/// </summary>
public class adhérent
{
    // Attributes
    public string prénom;
    public string nom;
    // Associations
    /// <summary>Libraries this member is registered with.</summary>
    public ArrayList mybibliothèque;
    /// <summary>Books currently held by this member.</summary>
    public ArrayList mylivre;
    /// <summary>Loans ("emprunts") made by this member.</summary>
    public ArrayList myemprute;
    // Operations
    /// <summary>
    /// Borrows a book (stub; developer code is preserved between the
    /// section markers on regeneration).
    /// </summary>
    public void emprunterLivre()
    {
        // section -64--88-56-1-49f21be0:16e1d144e90:-8000:000000000000151A begin
        // section -64--88-56-1-49f21be0:16e1d144e90:-8000:000000000000151A end
    }
    /// <summary>
    /// Returns a borrowed book (stub; body preserved between the section
    /// markers).
    /// </summary>
    public void rendreLivre()
    {
        // section -64--88-56-1-49f21be0:16e1d144e90:-8000:000000000000151C begin
        // section -64--88-56-1-49f21be0:16e1d144e90:-8000:000000000000151C end
    }
} /* end class adhérent */
<file_sep>#ifndef BANQUE_H
#define BANQUE_H
#include"compte.h"
// NOTE(review): "using namespace std;" in a header leaks std into every
// includer — consider removing it.
using namespace std;
// A bank: a bounded collection of compte (account) objects.
class banque
{
public:
banque(int=20); // capacity defaults to 20 accounts
~banque();
void add(compte c); // add an account (taken by value)
void deletee(int numc); // remove the account numbered numc
compte getcompte(int numc); // look up an account by its number
void affichage(); // display all accounts
private:
compte *agence; // dynamically allocated account storage
int nbreel; // current number of accounts
int nbrelimite; // maximum capacity
};
#endif
<file_sep>#ifndef livre_h
#define livre_h
#include "boolean.h"
#include "volume.h"
class adhérent;
class emprute;
class BD;
// A book ("livre"): a volume that can be borrowed by a member.
// NOTE(review): the UML generator emitted several association members
// multiple times (myadhérent x4, myemprute x2); C++ forbids duplicate
// member names, so a single member per association is kept.
class livre : public volume {
public:
virtual void empruntable();
public:
boolean disponible;
public:
/**
 * @element-type adhérent
 */
adhérent *myadhérent;
/**
 * @element-type emprute
 */
emprute *myemprute;
/**
 * @element-type BD
 */
BD *myBD;
};
#endif // livre_h
<file_sep>#include "journal.h"
<file_sep>#ifndef emprute_h
#define emprute_h
#include <vector>
#include "date.h"
class adhérent;
class livre;
// A loan ("emprunt"): records which member borrowed which book(s) and the
// borrow/return dates.
// NOTE(review): the UML generator emitted duplicate association members
// (myadhérent x5, a self-referencing myemprute x2, and both "livre*" and
// "std::vector<livre*>" under the same name mylivre). Duplicates are
// ill-formed C++, so one member per association is kept — the vector form
// for the to-many livre end.
class emprute {
public:
virtual void pronlongerDateRetour();
public:
date date_emprunt;
date date_retour;
public:
/**
 * @element-type adhérent
 */
adhérent *myadhérent;
/**
 * @element-type livre
 */
std::vector< livre* > mylivre;
};
#endif // emprute_h
<file_sep><?php
error_reporting(E_ALL);
/**
* untitledModel - class.BD.php
*
* $Id$
*
* This file is part of untitledModel.
*
* Automatically generated on 17.12.2019, 11:12:05 with ArgoUML PHP module
* (last revised $Date: 2010-01-12 20:14:42 +0100 (Tue, 12 Jan 2010) $)
*
* @author firstname and lastname of author, <<EMAIL>>
*/
if (0 > version_compare(PHP_VERSION, '5')) {
die('This file was generated for PHP 5');
}
/**
* include livre
*
* @author firstname and lastname of author, <<EMAIL>>
*/
require_once('class.livre.php');
/**
* include volume
*
* @author firstname and lastname of author, <<EMAIL>>
*/
require_once('class.volume.php');
/* user defined includes */
// section -64--88-56-1-49f21be0:16e1d144e90:-8000:00000000000014E1-includes begin
// section -64--88-56-1-49f21be0:16e1d144e90:-8000:00000000000014E1-includes end
/* user defined constants */
// section -64--88-56-1-49f21be0:16e1d144e90:-8000:00000000000014E1-constants begin
// section -64--88-56-1-49f21be0:16e1d144e90:-8000:00000000000014E1-constants end
/**
 * BD ("bande dessinée", comic book): a volume that additionally records
 * the name of its illustrator.
 *
 * @access public
 * @author firstname and lastname of author, <<EMAIL>>
 */
class BD
extends volume
{
// --- ASSOCIATIONS ---
// generateAssociationEnd :
// --- ATTRIBUTES ---
/**
 * Name of the comic's illustrator ("dessinateur").
 *
 * @access public
 * @var String
 */
public $dessinateur = null;
// --- OPERATIONS ---
} /* end of class BD */
?><file_sep>#include <stdio.h>
#include <stdlib.h>
/* Exercise driver for this dynamic-allocation lab ("TP"); earlier exercises
 * are kept commented out here and below — only the active code runs. */
int main()
{/* int tab[50];
int m;
printf("donnez la dimmension de votre tab");
scanf("%d",&m);
for(int i=0;i<m;i++)
{
scanf("%d",&tab[i]);}
affichevecteur(tab, m);
int mat[2][2]={(1,3),(6,7)};
affichematrice(mat,2,2);*/
//exe2
return 0;
}
//EXE 1 :deux fonction pour afficher une vecteur et une matrice
/*void affichevecteur(int*vecteur,int dimension)
{
for(int i=0;i<dimension;i++)
{
printf("%d",vecteur[i]);
printf("\n");
}
}
void affichematrice(int**matrice,int ligne,int colonne)
{
for(int i=0;i<ligne;i++)
{ /*for(int j=0;j<colonne;j++)
printf("%d%d",matrice[i][j]);
affichevecteur(matrice[i],colonne);
printf("\n");
}
}*/
//EXE2:ecrire une fonction qui alloue la memoire d un vecteur puis une fonction pour libere le vecteur
/*int* alloue_vecteur(int dimmension,int val)
{
int i;
int* vecteur=malloc(dimmension*sizeof(int));
for(i=0;i<dimmension;i++)
{
vecteur[i]=val;
return vecteur;
}
}
void libre_vecteur(int*vecteur)
{
free(vecteur);
}
//exe3 et puis la meme demarche pour la matrice
int** alloue_matrice(int lignes,int colonnes,int val)
{
int i,j;
int**matrice=malloc(lignes*sizeof(int*));
for(i=0;i<lignes;i++)
{
for(j=0;i<colonnes;j++)
{
matrice[i][j]=val;
}
}
return matrice;
}
void libere_matrice(int**matrice,int lignes)
{
int i;
for(i=0;i<lignes;i++)
{
free((int*)matrice[i]);
}
}*/
//exe 4 fonction pour allouer la memoire des matrice d identite
/* Allocates a lignes x colonnes matrix of ints, zero-initialised via calloc.
 * Returns the array of row pointers; each row is allocated separately, so
 * callers must free every row and then the row array. */
int** alloue_matrice_zero(int lignes,int colonnes)
{
    int** rows = calloc(lignes, sizeof(int*));
    int row;
    for (row = 0; row < lignes; row++)
        rows[row] = calloc(colonnes, sizeof(int));
    return rows;
}
/* Builds and returns a dimension x dimension identity matrix.
 * Fixes two defects in the draft: the diagonal was written as
 * identite[i][j] with j never declared (should be identite[i][i]), and it
 * called the nonexistent affiche_matrice() on every iteration (the display
 * helpers in this file are commented out). */
int** genere_matrice_identite(int dimension)
{
    int i;
    int** identite = alloue_matrice_zero(dimension, dimension);
    for (i = 0; i < dimension; i++)
    {
        identite[i][i] = 1; /* was identite[i][j] — j undeclared */
    }
    return identite;
}
//Key takeaway: know the difference between passing by value and passing by pointer — a pointer avoids copying and lets the callee modify the caller's data, which simplifies the code.
//This lab ("TP") is about dynamic memory allocation.
<file_sep>import java.util.Vector;
/**
 * Loan ("emprunt") generated from the UML model: the period during which a
 * member holds one or more books.
 *
 * NOTE(review): the generator emitted each association field several times
 * (myemprute x2, myadhérent x5 — one with a conflicting type — and
 * mylivre x2); duplicate fields do not compile in Java, so a single field
 * per association is kept, using Vector for the to-many ends.
 */
public class emprute {
    public date date_emprunt;
    public date date_retour;
    /** Member(s) party to this loan. */
    public Vector myadhérent;
    /**
     * @element-type livre
     */
    public Vector mylivre;
    /** Related loans. */
    public Vector myemprute;
    /** Extends the loan's return date (stub; name sic). */
    public void pronlongerDateRetour() {
    }
}<file_sep>#ifndef COMPTE_H
#define COMPTE_H
// NOTE(review): "using namespace std;" in a header leaks std into every
// includer — consider removing it.
using namespace std;
// A bank account: owner name, account number and balance. Declares the
// rule-of-three trio (copy ctor, copy assignment, destructor) —
// presumably because nom is dynamically owned; verify in the .cpp.
class compte
{
public:
compte(); // default-constructed account
compte(char*,int,float); // name, number, initial balance
compte(const compte &); // copy constructor
~compte();
compte &operator=(const compte&);
void consulter(); // display the account state
void retrait(float); // withdraw the given amount
void depot(float); // deposit the given amount
int numerocompte(); // accessor for the account number
private:
char *nom; // owner name
int num; // account number
float solde; // current balance
};
#endif
<file_sep>#ifndef A_h
#define A_h
#include <string>
class C;
class B;
// Shape-like class generated from the UML model: radius, center, key,
// plus association ends to B and C.
// NOTE(review): the generator emitted the UML types "Integer"/"String",
// which do not exist in C++; they are mapped to int / std::string here so
// the header compiles.
class A {
public:
virtual void perimetre();
virtual void aire();
public:
int R;
int Center;
std::string cle;
public:
C *A_C;
B *A_B;
};
#endif // A_h
<file_sep>#ifndef C_h
#define C_h
class A;
class B;
// Association class generated from the UML model: holds the ends towards
// A (fixed multiplicity 2) and B.
// NOTE(review): the generator also emitted a nameless element — a bare
// "class ;" forward declaration and an untyped member "*my;" — which is
// ill-formed C++ and has been dropped.
class C {
public:
/**
 * @element-type A
 */
A *A_C[ 2];
B *B_C;
};
#endif // C_h
<file_sep>#ifndef journal_h
#define journal_h
#include "Document.h"
class volume;
// A newspaper ("journal"): a Document with a publication date.
// NOTE(review): the generator listed Document twice as a base class
// ("public Document, virtual public Document"), which is ill-formed C++;
// a single base is kept. The UML "Integer" type is mapped to int.
class journal : public Document {
public:
int date_parution;
public:
/**
 * @element-type volume
 */
volume *myvolume;
};
#endif // journal_h
<file_sep>#include "livre.h"
// Stub: decides whether this book can be borrowed (generated; fill in
// between the markers).
void livre::empruntable()
// don't delete the following line as it's needed to preserve source code of this autogenerated element
// section -64--88-56-1-49f21be0:16e1d144e90:-8000:00000000000014EA begin
{
}
// section -64--88-56-1-49f21be0:16e1d144e90:-8000:00000000000014EA end
// don't delete the previous line as it's needed to preserve source code of this autogenerated element
<file_sep>// Doxygen-generated search index entry for class "compte" (do not edit by hand).
var searchData=
[
['compte_3',['compte',['../classcompte.html',1,'']]]
];
<file_sep>#include "adhérent.h"
// Stub: borrows a book for this member (generated; fill in between the markers).
void adhérent::emprunterLivre()
// don't delete the following line as it's needed to preserve source code of this autogenerated element
// section -64--88-56-1-49f21be0:16e1d144e90:-8000:000000000000151A begin
{
}
// section -64--88-56-1-49f21be0:16e1d144e90:-8000:000000000000151A end
// don't delete the previous line as it's needed to preserve source code of this autogenerated element
// Stub: returns a borrowed book.
void adhérent::rendreLivre()
// don't delete the following line as it's needed to preserve source code of this autogenerated element
// section -64--88-56-1-49f21be0:16e1d144e90:-8000:000000000000151C begin
{
}
// section -64--88-56-1-49f21be0:16e1d144e90:-8000:000000000000151C end
// don't delete the previous line as it's needed to preserve source code of this autogenerated element
<file_sep>#include "BD.h"
<file_sep>#ifndef volume_h
#define volume_h
#include "Document.h"
#include "string.h"
class journal;
// A volume: a Document written by an author.
// NOTE(review): the generator listed Document twice as a base class,
// which is ill-formed C++; a single base is kept.
class volume : public Document {
public:
string auteur;
public:
/**
 * @element-type journal
 */
journal *myjournal;
};
#endif // volume_h
<file_sep>// Doxygen-generated search index entry for class "compte" (do not edit by hand).
var searchData=
[
['compte_1',['compte',['../classcompte.html',1,'']]]
];
<file_sep>#ifndef Document_h
#define Document_h
#include <vector>
#include "string.h"
class bibliothèque;
// Root of the document hierarchy: anything with a title that a
// bibliothèque can hold.
// NOTE(review): the generator emitted the bibliothèque association end
// three times under the same name (two raw pointers and a vector), which
// is ill-formed C++; the to-many vector form is kept.
class Document {
public:
string titre;
public:
/**
 * @element-type bibliothèque
 */
std::vector< bibliothèque* > mybibliothèque;
};
#endif // Document_h
<file_sep>#ifndef Dicionnaire_h
#define Dicionnaire_h
#include "volume.h"
// A dictionary: a volume with no additional members (marker subclass).
class Dicionnaire : public volume {};
#endif // Dicionnaire_h
<file_sep><?php
error_reporting(E_ALL);
/**
* untitledModel - class.livre.php
*
* $Id$
*
* This file is part of untitledModel.
*
* Automatically generated on 17.12.2019, 11:12:05 with ArgoUML PHP module
* (last revised $Date: 2010-01-12 20:14:42 +0100 (Tue, 12 Jan 2010) $)
*
* @author firstname and lastname of author, <<EMAIL>>
*/
if (0 > version_compare(PHP_VERSION, '5')) {
die('This file was generated for PHP 5');
}
/**
* include BD
*
* @author firstname and lastname of author, <<EMAIL>>
*/
require_once('class.BD.php');
/**
* include adhérent
*
* @author firstname and lastname of author, <<EMAIL>>
*/
require_once('class.adhérent.php');
/**
* include volume
*
* @author firstname and lastname of author, <<EMAIL>>
*/
require_once('class.volume.php');
/* user defined includes */
// section -64--88-56-1-49f21be0:16e1d144e90:-8000:00000000000014E5-includes begin
// section -64--88-56-1-49f21be0:16e1d144e90:-8000:00000000000014E5-includes end
/* user defined constants */
// section -64--88-56-1-49f21be0:16e1d144e90:-8000:00000000000014E5-constants begin
// section -64--88-56-1-49f21be0:16e1d144e90:-8000:00000000000014E5-constants end
/**
 * A book ("livre"): a volume that can be borrowed; availability is
 * tracked through the $disponible flag.
 *
 * @access public
 * @author firstname and lastname of author, <<EMAIL>>
 */
class livre
extends volume
{
// --- ASSOCIATIONS ---
// generateAssociationEnd : // generateAssociationEnd : // generateAssociationEnd :
// --- ATTRIBUTES ---
/**
 * Whether the book is currently available for loan.
 *
 * @access public
 * @var boolean
 */
public $disponible = false;
// --- OPERATIONS ---
/**
 * Decides whether this book can be borrowed (stub generated by ArgoUML;
 * developer code is preserved between the section markers).
 *
 * @access public
 * @author firstname and lastname of author, <<EMAIL>>
 * @return mixed
 */
public function empruntable()
{
// section -64--88-56-1-49f21be0:16e1d144e90:-8000:00000000000014EA begin
// section -64--88-56-1-49f21be0:16e1d144e90:-8000:00000000000014EA end
}
} /* end of class livre */
?><file_sep>public class Dicionnaire extends volume {
} | 6021fa7a6694154acfed9a3d5ae41f2ed95b7fba | [
"JavaScript",
"C#",
"Java",
"PHP",
"C",
"C++"
] | 40 | C# | ouafaema/mestp | 09099c4021588315ebaa020771539f35310501b0 | 2a84ccd88c56bf5ce4d38e9d2a61bc1fd5ca351f |
refs/heads/master | <repo_name>apatel98/full_stack<file_sep>/asimLab2/greeter.ts
interface Person{
firstNmae: string;
lastName: string;
}
function greeter(person){
return "Hello," + person.firstNmae + "" + person.lastName;
}
let user = {firstNmae:"Jane", lastName: "User"};
document.body.textContent = greeter(user); | a31866863df3db2998481c1ccd9507624bf9621e | [
"TypeScript"
] | 1 | TypeScript | apatel98/full_stack | 34569772ad077d3221cac2225a411b66af59ad8c | f572fd8f96fc17bd96a29c81ad31a9660b33f491 |
refs/heads/master | <file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace SlitherNET.Game
{
/// <summary>
/// One food pellet on the map: a colour index plus a world position.
/// </summary>
public class Food
{
    public Vector2f Position { get; set; }
    public byte Color { get; set; }

    /// <summary>Creates a pellet at an explicit position.</summary>
    public Food(Vector2f position, byte color)
    {
        this.Position = position;
        this.Color = color;
    }

    /// <summary>Creates a pellet of the given colour at a random spot.</summary>
    public Food(byte color)
    {
        this.Color = color;
        this.setRandomPosition();
    }

    /// <summary>Creates a pellet with a random colour (0..22) at a random spot.</summary>
    public Food()
        : this((byte)Metadata.Rng.Next(0, 23))
    {
    }

    // Picks a spot inside the populated region of the map
    // (x in [22907,30000), y in [19137,30337)).
    private void setRandomPosition()
    {
        this.Position = new Vector2f(
            Metadata.Rng.Next(22907, 30000),
            Metadata.Rng.Next(19137, 30337));
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace SlitherNET.Network.Packets.Client
{
/// <summary>
/// Client packet sent once after connecting: the chosen skin id and the
/// nickname. Wire layout: two leading bytes, one skin byte, then the rest
/// of the payload is the nickname string.
/// </summary>
public class CMSG_SetUsername : IPacket
{
public char ProtocolId
{
get
{
// Client-to-server packets carry no server opcode.
throw new NotImplementedException();
}
}
public string Username { get; set; }
public byte SkinId { get; set; }
public void Deserialize(byte[] data)
{
var reader = new BigEndianReader(new MemoryStream(data));
// Skip the two leading bytes -- meaning unknown, TODO confirm against the client.
reader.ReadByte();
reader.ReadByte();
this.SkinId = reader.ReadByte();
this.Username = reader.ReadEndString();
}
public byte[] Serialize()
{
// The server never sends this packet.
throw new NotImplementedException();
}
}
}
<file_sep>using SlitherNET.Network;
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace SlitherNET.Game
{
/// <summary>
/// Server-side state of one snake: identity, skin, motion and body parts.
/// </summary>
public class Snake
{
public int ID { get; set; }
public float Speed { get; set; }
public int Skin { get; set; }
public Vector2f Position { get; set; }
public string Name { get; set; }
public Vector2f HeadPosition { get; set; }
public List<SnakePart> Parts { get; set; }
public int Size { get; set; }
// Heading in degrees, set from the client's mouse-angle byte (see GameClient).
public short CurrentAngle { get; set; }
// The connection that owns this snake.
public GameClient Player { get; set; }
public Snake()
{
this.Parts = new List<SnakePart>();
// Seed the body with 25 parts at placeholder offsets (i+1, i+2).
for (int i = 0; i < 50; i += 2)
{
this.Parts.Add(new SnakePart(this, new Vector2f(i + 1, i + 2)));
}
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace SlitherNET.Game
{
/// <summary>
/// One segment of a snake's body: a back-reference to the owning snake
/// plus the segment's position.
/// </summary>
public class SnakePart
{
public Snake Body { get; set; }
public Vector2f Position { get; set; }
public SnakePart(Snake body, Vector2f position)
{
this.Body = body;
this.Position = position;
}
}
}
<file_sep>using SlitherNET.Game;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace SlitherNET.Network.Packets.Server
{
/// <summary>
/// Server packet 'l': the leaderboard -- the receiving player's rank plus
/// one entry (length, score fraction, skin, name) per listed snake.
/// </summary>
public class SMSG_l_Leaderboard : IPacket
{
public char ProtocolId
{
get
{
return 'l';
}
}
// Rank of the receiving player within the leaderboard.
public short Rank { get; set; }
public List<Snake> Snakes { get; set; }
public SMSG_l_Leaderboard(short rank, List<Snake> snakes)
{
this.Rank = rank;
this.Snakes = snakes;
}
public void Deserialize(byte[] data)
{
// Server-to-client only; never parsed here.
throw new NotImplementedException();
}
public byte[] Serialize()
{
var lengthOfUsername = 0;
this.Snakes.ForEach(x => lengthOfUsername += x.Player.Username.Length);
// NOTE(review): 7 bytes are reserved per snake but only 6 are written
// below (short + int24 + byte), so the packet ends with one padding
// zero per snake -- confirm against the client parser.
var bytes = new byte[(8 + lengthOfUsername) + (this.Snakes.Count * 7)];
var writer = new BigEndianWriter(new MemoryStream(bytes));
writer.WriteByte(0);
writer.WriteByte(0);
writer.WriteByte(Convert.ToByte(this.ProtocolId));
writer.WriteByte(0);
writer.WriteShort(this.Rank);
writer.WriteShort((short)this.Snakes.Count);
foreach(var snake in this.Snakes)
{
// Hard-coded body length and score fraction -- presumably placeholders
// until real scoring exists; TODO derive from the snake.
writer.WriteShort(306);
writer.WriteInt24(0.7810754645511785 * 16777215);
writer.WriteByte((byte)snake.Skin);
writer.WriteString(snake.Player.Username);
}
return bytes;
}
}
}
<file_sep># SlitherNET
An unofficial .NET server for the game slither.io.
Work in progress — parts of the code are still rough.
Thanks to : @Kogs, @circa94, @ThunderGemios10
<file_sep>using SlitherNET.Game;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace SlitherNET.Network.Packets.Server
{
/// <summary>
/// Server packet 'F': the full food list, 6 bytes per pellet.
/// NOTE(review): the colour byte is written both before and after the
/// coordinates -- confirm this matches the client's parser.
/// </summary>
public class SMSG_F_MapFoods : IPacket
{
public char ProtocolId
{
get
{
return 'F';
}
}
public List<Food> Foods { get; set; }
public SMSG_F_MapFoods(List<Food> foods)
{
this.Foods = foods;
}
public void Deserialize(byte[] data)
{
// Server-to-client only; never parsed here.
throw new NotImplementedException();
}
public byte[] Serialize()
{
// 3 header bytes + 6 bytes per pellet.
var bytes = new byte[3 + (6 * this.Foods.Count)];
var writer = new BigEndianWriter(new MemoryStream(bytes));
writer.WriteByte(0);
writer.WriteByte(0);
writer.WriteByte(Convert.ToByte(this.ProtocolId));
foreach(var food in this.Foods)
{
writer.WriteByte(food.Color);
writer.WriteShort((short)food.Position.X);
writer.WriteShort((short)food.Position.Y);
writer.WriteByte(food.Color);
}
return bytes;
}
}
}
<file_sep>using SlitherNET.Game;
using SlitherNET.Network;
using SlitherNET.Network.Packets;
using SlitherNET.Network.Packets.Client;
using SlitherNET.Network.Packets.Server;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices;
using System.Text;
using System.Threading.Tasks;
using System.Timers;
using WebSocketSharp;
using WebSocketSharp.Server;
namespace SlitherNET.Network
{
/// <summary>
/// One connected player: a WebSocketSharp behaviour that owns the player's
/// snake, parses client packets and pushes server packets back.
/// State machine: GameState 1 = waiting for the username/skin packet,
/// GameState 2 = in game (update packets).
/// </summary>
public class GameClient : WebSocketBehavior
{
// 1 = expecting the username packet, 2 = in game.
public int GameState = 1;
public string Username = string.Empty;
public Snake MySnake = null;
// Cleared on disconnect so the logic timer stops ticking.
public bool Active = true;
// 1-second tick that advances this player's snake.
public Timer LogicTimer { get; set; }
/// <summary>Removes the player from the room and refreshes the leaderboard.</summary>
protected override void OnClose(CloseEventArgs e)
{
Console.WriteLine("Connection closed with the player");
this.Active = false;
GameRoom.Instance.RemovePlayer(this);
GameRoom.Instance.UpdateLeaderboard();
base.OnClose(e);
}
/// <summary>Dispatches an incoming frame according to GameState.</summary>
protected override void OnMessage(MessageEventArgs e)
{
Console.WriteLine("Received message (LEN : " + e.RawData.Length + ")");
if (GameState == 1) // Username
{
// Set the username
var usernamePacket = new CMSG_SetUsername();
usernamePacket.Deserialize(e.RawData);
this.Username = usernamePacket.Username;
// Send the initial packet
this.GameState = 2;
this.SendPacket(new SMSG_a_InitialPacket(21600));
// Spawn the snake at a fixed start position with the chosen skin.
this.MySnake = new Snake()
{
Player = this,
ID = 1,
Speed = (float)(5.76 * 1E3),
Skin = usernamePacket.SkinId,
Position = new Vector2f((float)(28907.6 * 5), (float)(21137.4 * 5)),
Name = this.Username == "" ? "Anonymous" : this.Username,
HeadPosition = new Vector2f(28907.3f * 5, 21136.8f * 5),
};
// Announce the snake, send the MOTD, register with the room,
// push the food list and the leaderboard, then start the 1 s logic tick.
this.SendPacket(new SMSG_s_NewSnake(this.MySnake));
this.SendPacket(new SMSG_m_MessageOfTheDay(Program.Settings.Basic.Motd, Program.Settings.Basic.Caption));
GameRoom.Instance.AddPlayer(this);
GameRoom.Instance.ShowFoods(this);
GameRoom.Instance.UpdateLeaderboard();
this.SendPacket(new SMSG_p_Pong());
this.LogicTimer = new Timer(1000);
this.LogicTimer.Elapsed += (object sender, ElapsedEventArgs e2) =>
{
if (this.Active)
{
this.UpdateSnake();
}
else
{
this.LogicTimer.Stop();
}
};
this.LogicTimer.Start();
}
else if(this.GameState == 2) // Update game
{
var updatePacket = new CMSG_Update();
updatePacket.Deserialize(e.RawData);
//Console.WriteLine("ActionType : " + updatePacket.ActionType);
if(updatePacket.ActionType == 253) // Mouse down
{
}
else if(updatePacket.ActionType == 254) // Mouse up
{
}
else if (updatePacket.ActionType == 251) // Ping
{
this.SendPacket(new SMSG_p_Pong());
}
else // Mouse rotation
{
// Map the 0..250 angle byte onto degrees (250 * 1.44 == 360).
var degrees = (short)Math.Floor(updatePacket.ActionType * 1.44);
this.MySnake.CurrentAngle = degrees;
//Console.WriteLine("Mouse angle : " + degrees);
this.SendPacket(new SMSG_e_UpdateSnakeDirection(this.MySnake, degrees));
}
}
}
// Advances the snake 170 units along its current heading and pushes the
// direction/position updates to the client.
public void UpdateSnake()
{
var incX = ((float)Math.Cos((Math.PI / 180) * this.MySnake.CurrentAngle) * 170);
var incY = ((float)Math.Sin((Math.PI / 180) * this.MySnake.CurrentAngle) * 170);
this.MySnake.Position.X += incX;
this.MySnake.Position.Y += incY;
this.SendPacket(new SMSG_e_UpdateSnakeDirection(this.MySnake, this.MySnake.CurrentAngle));
this.SendPacket(new SMSG_G_UpdateSnake(this.MySnake));
}
// Serializes the packet, logs it, and sends it over the socket.
public void SendPacket(IPacket packet)
{
var bs = packet.Serialize();
Console.WriteLine("Send packet ===> " + packet.GetType().Name + " (len: " + bs.Length + ")");
this.Send(bs);
}
}
}
<file_sep>using SlitherNET.Game;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace SlitherNET.Network.Packets.Server
{
/// <summary>
/// Server packet 'e': updates a snake's heading on the client.
/// </summary>
public class SMSG_e_UpdateSnakeDirection : IPacket
{
public char ProtocolId
{
get
{
return 'e';
}
}
public Snake @Snake { get; set; }
// Heading to send; truncated to one byte on the wire.
public double Direction { get; set; }
public SMSG_e_UpdateSnakeDirection(Snake snake, double direction)
{
this.Snake = snake;
this.Direction = direction;
}
public void Deserialize(byte[] data)
{
// Server-to-client only; never parsed here.
throw new NotImplementedException();
}
public byte[] Serialize()
{
// NOTE(review): 10 bytes are allocated but only 8 are written, so two
// trailing zero bytes are sent -- confirm the client tolerates this.
var bytes = new byte[10];
var writer = new BigEndianWriter(new MemoryStream(bytes));
writer.WriteByte(0);
writer.WriteByte(0);
writer.WriteByte(Convert.ToByte(this.ProtocolId));
writer.WriteShort((short)this.Snake.ID);
writer.WriteByte((byte) this.Direction);
// Constant trailing bytes 71/104 ('G'/'h') -- meaning unknown, TODO confirm.
writer.WriteByte((byte) 71);
writer.WriteByte((byte) 104);
return bytes;
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace SlitherNET.Network.Packets.Server
{
/// <summary>
/// Server packet 'p': pong reply to a client ping. The payload is just the
/// two-byte placeholder header followed by the opcode.
/// </summary>
public class SMSG_p_Pong : IPacket
{
    public char ProtocolId
    {
        get { return 'p'; }
    }

    public void Deserialize(byte[] data)
    {
        // Pong is outbound-only; the server never parses one.
        throw new NotImplementedException();
    }

    public byte[] Serialize()
    {
        // Exactly the three bytes the writer-based version produced: 0, 0, 'p'.
        var buffer = new byte[3];
        buffer[0] = 0;
        buffer[1] = 0;
        buffer[2] = Convert.ToByte(this.ProtocolId);
        return buffer;
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace SlitherNET.Network.Packets.Server
{
/// <summary>
/// Server packet 'a': the initial game-setup packet carrying the map
/// radius and the movement/physics constants the client expects.
/// </summary>
public class SMSG_a_InitialPacket : IPacket
{
    public char ProtocolId
    {
        get { return 'a'; }
    }

    // Game-world radius sent to the client.
    public int Radius { get; set; }

    public SMSG_a_InitialPacket(int radius)
    {
        this.Radius = radius;
    }

    public void Deserialize(byte[] data)
    {
        // Server-to-client only; never parsed here.
        throw new NotImplementedException();
    }

    public byte[] Serialize()
    {
        // NOTE(review): 30 bytes are allocated but only 26 written, so the
        // packet carries trailing zero padding -- confirm the client tolerates it.
        var bytes = new byte[30];
        var writer = new BigEndianWriter(new MemoryStream(bytes));
        writer.WriteByte(0);
        writer.WriteByte(0);
        writer.WriteByte(Convert.ToByte(this.ProtocolId));
        writer.WriteInt24(this.Radius);
        writer.WriteShort(411);
        writer.WriteShort(480);
        writer.WriteShort(130);
        // Fixed operator precedence: the casts previously bound tighter than
        // the multiply -- "(byte)4.8 * 10" sent 40 instead of 48,
        // "(short)4.25 * 100" sent 400 instead of 425, and
        // "(short)0.5f * 100" sent 0 instead of 50 -- truncating each
        // constant before scaling. The whole product is cast now.
        writer.WriteByte((byte)(4.8 * 10));
        writer.WriteShort((short)(4.25 * 100));
        writer.WriteShort((short)(0.5f * 100));
        writer.WriteShort((short)(12 * 100));
        writer.WriteShort((short)(0.033 * 1E3));
        writer.WriteShort((short)(0.028 * 1E3));
        writer.WriteShort((short)(0.43 * 1E3));
        writer.WriteByte(Metadata.PROTOCOL_VERSION);
        return bytes;
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace SlitherNET
{
// Process-wide constants and shared services.
public class Metadata
{
// Wire-protocol version advertised in the initial packet.
public const byte PROTOCOL_VERSION = 6;
// Shared RNG used for food colour/placement. System.Random is not
// thread-safe -- TODO confirm all callers run on one thread.
public static Random Rng = new Random(Environment.TickCount);
}
}
<file_sep>using SlitherNET.Game;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace SlitherNET.Network.Packets.Server
{
/// <summary>
/// Server packet 's': announces a snake -- id, angles, speed, skin,
/// position, name and the per-part offsets.
/// </summary>
public class SMSG_s_NewSnake : IPacket
{
public char ProtocolId
{
get
{
return 's';
}
}
public Snake @Snake { get; set; }
public SMSG_s_NewSnake(Snake snake)
{
this.Snake = snake;
}
public void Deserialize(byte[] data)
{
// Server-to-client only; never parsed here.
throw new NotImplementedException();
}
public byte[] Serialize()
{
// 37 fixed bytes + the name + 2 bytes per body part.
var bytes = new byte[(37 + this.Snake.Name.Length) + (2 * this.Snake.Parts.Count)];
var writer = new BigEndianWriter(new MemoryStream(bytes));
writer.WriteByte(0);
writer.WriteByte(0);
writer.WriteByte(Convert.ToByte(this.ProtocolId));
writer.WriteShort((short)this.Snake.ID);
// Angles/fractions are sent as 24-bit fixed-point values scaled by
// 16777215 -- presumably fractions of the full circle; TODO confirm
// against the client decoder.
writer.WriteInt24(3.1415926535 / Math.PI * 16777215);
writer.WriteByte(0);
writer.WriteInt24(3.1415926535 / Math.PI * 16777215);
writer.WriteShort((short)this.Snake.Speed);
writer.WriteInt24(0.028860630325116536 * 16777215);
writer.WriteByte((byte)this.Snake.Skin);
writer.WriteInt24(this.Snake.Position.X);
writer.WriteInt24(this.Snake.Position.Y);
writer.WriteString(this.Snake.Name);
writer.WriteInt24(this.Snake.HeadPosition.X);
writer.WriteInt24(this.Snake.HeadPosition.Y);
foreach(var part in this.Snake.Parts)
{
writer.WriteByte((byte)part.Position.X);
writer.WriteByte((byte)part.Position.Y);
}
return bytes;
}
}
}
<file_sep>using SlitherNET.Network;
using SlitherNET.Network.Packets.Server;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace SlitherNET.Game
{
/// <summary>
/// Singleton game world: the food pellets on the map and the connected players.
/// </summary>
public class GameRoom
{
public List<Food> Foods { get; set; }
public List<GameClient> Players { get; set; }
public GameRoom()
{
this.Foods = new List<Food>();
this.Players = new List<GameClient>();
this.initializeFoods();
}
// Seeds the map with 10000 randomly placed pellets.
private void initializeFoods()
{
for (int i = 0; i < 10000; i++)
{
this.Foods.Add(new Food());
}
}
public void AddPlayer(GameClient session)
{
lock (this.Players)
{
this.Players.Add(session);
}
}
public void RemovePlayer(GameClient session)
{
lock (this.Players)
{
this.Players.Remove(session);
}
}
// Sends the full food list to one client.
public void ShowFoods(GameClient session)
{
session.SendPacket(new SMSG_F_MapFoods(this.Foods));
}
// Rebuilds the snake list and pushes the leaderboard to every player.
// NOTE(review): Players is iterated here without the lock used by
// Add/RemovePlayer -- confirm this cannot race.
public void UpdateLeaderboard()
{
var snakes = new List<Snake>();
foreach(var c in this.Players)
{
snakes.Add(c.MySnake);
}
foreach(var client in this.Players)
{
client.SendPacket(new SMSG_l_Leaderboard(1, snakes));
}
}
private static GameRoom mRoom { get; set; }
// Lazily created singleton (creation is not thread-safe -- TODO confirm
// the first access happens on one thread).
public static GameRoom Instance
{
get
{
if (mRoom == null) mRoom = new GameRoom();
return mRoom;
}
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace SlitherNET.Network.Packets.Client
{
/// <summary>
/// Client packet: a single action byte -- values up to 250 encode the
/// mouse angle, 251 is a ping, 253/254 are mouse down/up
/// (see GameClient.OnMessage for the dispatch).
/// </summary>
public class CMSG_Update : IPacket
{
    public char ProtocolId
    {
        get { throw new NotImplementedException(); }
    }

    // Raw first byte of the client payload.
    public byte ActionType { get; set; }

    public void Deserialize(byte[] data)
    {
        this.ActionType = new BigEndianReader(new MemoryStream(data)).ReadByte();
    }

    public byte[] Serialize()
    {
        // The server never sends this packet.
        throw new NotImplementedException();
    }
}
}
<file_sep>using SlitherNET.Network;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using WebSocketSharp.Server;
using YamlDotNet.Serialization;
using YamlDotNet.Serialization.NamingConventions;
namespace SlitherNET
{
public class Program
{
    // Global configuration loaded from Settings.yaml at startup.
    public static Settings @Settings { get; set; }

    public static void Main(string[] args)
    {
        Console.Title = "SlitherNET";
        Console.ForegroundColor = ConsoleColor.Yellow;
        Console.WriteLine("SlitherNET by Nightwolf");
        Console.ForegroundColor = ConsoleColor.Gray;
        LoadSettings();
        // Bind the WebSocket server to the configured endpoint and expose the
        // game service under /slither.
        var wssv = new WebSocketServer("ws://" + Settings.Network.Addr + ":" + Settings.Network.Port);
        wssv.AddWebSocketService<GameClient>("/slither");
        wssv.Start();
        Console.WriteLine("Listen on " + Settings.Network.Addr + ":" + Settings.Network.Port);
        // Keep the process alive; the server itself runs on background threads.
        while (true)
            Console.ReadLine();
    }

    // Load Settings.yaml into the static Settings property.
    // BUG FIX: the StreamReader was only closed on the success path, so a
    // deserialization error leaked the file handle. "using" guarantees
    // disposal on every path.
    public static void LoadSettings()
    {
        using (var reader = new StreamReader("Settings.yaml"))
        {
            var deserializer = new Deserializer(namingConvention: new CamelCaseNamingConvention());
            @Settings = deserializer.Deserialize<Settings>(reader);
        }
    }
}
// Root shape of Settings.yaml (deserialized in Program.LoadSettings with a
// camel-case naming convention).
public class Settings
{
    public NetworkSettings Network { get; set; }
    public BasicSettings Basic { get; set; }

    // Address and port the WebSocket server binds to.
    public class NetworkSettings
    {
        public string Addr { get; set; }
        public int Port { get; set; }
    }

    // Message-of-the-day and caption strings.
    // NOTE(review): presumably shown to clients (cf. SMSG_m_MessageOfTheDay)
    // -- confirm where Caption is consumed.
    public class BasicSettings
    {
        public string Motd { get; set; }
        public string Caption { get; set; }
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace SlitherNET.Network.Packets.Server
{
// Server -> client message-of-the-day packet (type byte 'm').
public class SMSG_m_MessageOfTheDay : IPacket
{
    // Not implemented: callers construct this packet directly and only call
    // Serialize, so the protocol id property is never queried.
    public char ProtocolId
    {
        get
        {
            throw new NotImplementedException();
        }
    }

    public string Message { get; set; }
    public string Author { get; set; }

    public SMSG_m_MessageOfTheDay(string message, string author)
    {
        this.Message = message;
        this.Author = author;
    }

    // Server packets are never deserialized by this server.
    public void Deserialize(byte[] data)
    {
        throw new NotImplementedException();
    }

    public byte[] Serialize()
    {
        // NOTE(review): fixed 1000-byte buffer -- assumes author + message
        // always fit; confirm an upper bound is enforced upstream.
        var bytes = new byte[1000];
        var writer = new BigEndianWriter(new MemoryStream(bytes));
        // Two leading header bytes (always 0 here), then the packet type.
        writer.WriteByte(0);
        writer.WriteByte(0);
        writer.WriteByte(Convert.ToByte('m'));
        // 24-bit big-endian constant 462.
        writer.WriteByte((byte)(462 >> 16));
        writer.WriteByte((byte)(462 >> 8));
        writer.WriteByte((byte)(462 & 0xFF));
        // BUG FIX: "(int)0.580671702663404 * 16777215" cast the double first,
        // so loc1 was always 0. Parenthesize so the product is computed and
        // then truncated to an int, as the surrounding 24-bit write expects.
        var loc1 = (int)(0.580671702663404 * 16777215);
        writer.WriteByte((byte)(loc1 >> 16));
        writer.WriteByte((byte)(loc1 >> 8));
        writer.WriteByte((byte)(loc1 & 0xFF));
        writer.WriteString(this.Author);
        writer.WriteString(this.Message);
        return bytes;
    }
}
}
| 8ab03fbd3110519080da528d957668a035e3f96d | [
"Markdown",
"C#"
] | 17 | C# | zontreck/SlitherNET | 42e48b08a5e870da62992431c01d063442f7a667 | d2e58116138d7f12b6969b9a03dfd4a35b1d69e8 |
refs/heads/master | <repo_name>jgoppert/qualisys_ros<file_sep>/src/gazebo_model_pose.cpp
#include <functional>
#include <gazebo/gazebo.hh>
#include <gazebo/physics/physics.hh>
#include <gazebo/common/common.hh>
#include <ignition/math/Vector3.hh>
#include <ros/ros.h>
#include <geometry_msgs/PoseStamped.h>
namespace gazebo
{
    // Gazebo model plugin that pins a model to externally supplied poses:
    // it subscribes to a geometry_msgs/PoseStamped topic (named by the
    // <topic> SDF element) and teleports the model to each received pose.
    class ModelPose : public ModelPlugin
    {
    private:
        physics::ModelPtr model{NULL};  // model this plugin is attached to
        ros::Subscriber sub;            // pose-topic subscription handle
        ros::NodeHandle n;
    public:
        ModelPose() {
            // Gazebo loads plugins outside a ROS node, so ROS must be
            // initialized here before any NodeHandle use.
            std::map<std::string, std::string> args;
            ros::init(args, "set_pose");
        }
        void Load(physics::ModelPtr _parent, sdf::ElementPtr _sdf) {
            // Store the pointer to the model
            this->model = _parent;
            // Disable physics on the model: it is driven purely by the
            // incoming poses, not by gravity or collisions.
            this->model->SetGravityMode(false);
            this->model->SetStatic(true);
            if (_sdf->HasElement("topic")) {
                std::string topic = _sdf->GetElement("topic")->Get<std::string>();
                this->sub = this->n.subscribe(topic, 1000, &ModelPose::PoseCallback, this);
            } else {
                gzerr << "[gazebo_model_pose] Must set pose topic";
            }
        }
        // Teleport the model to the pose carried by the incoming message.
        void PoseCallback(const geometry_msgs::PoseStamped& msg) {
            // ignition Pose3d takes (x, y, z, qw, qx, qy, qz).
            ignition::math::Pose3d pose = {msg.pose.position.x, msg.pose.position.y, msg.pose.position.z,
                                           msg.pose.orientation.w, msg.pose.orientation.x,
                                           msg.pose.orientation.y, msg.pose.orientation.z
                                          };
            this->model->SetWorldPose(pose);
        }
    };
    // Register this plugin with the simulator
    GZ_REGISTER_MODEL_PLUGIN(ModelPose)
}
/* vim: set et fenc=utf-8 ff=unix sts=0 sw=2 ts=2 : */
<file_sep>/scripts/image_combine.py
#!/usr/bin/env python3
import rospy
import message_filters
from sensor_msgs.msg import Image, CameraInfo
from cv_bridge import CvBridge, CvBridgeError
import cv2
def callback(image_left, image_right):
    """Stitch time-synchronized left/right frames side by side and publish.

    BUG FIX: bridge.imgmsg_to_cv2 can also raise CvBridgeError, but only the
    publish step was inside the try block, so one bad incoming frame crashed
    the callback. The whole conversion pipeline is now guarded.
    """
    try:
        left = bridge.imgmsg_to_cv2(image_left, "bgr8")
        right = bridge.imgmsg_to_cv2(image_right, "bgr8")
        img_concat = cv2.hconcat([left, right])
        pub_img.publish(bridge.cv2_to_imgmsg(img_concat, "bgr8"))
    except CvBridgeError as e:
        print(e)
# Node setup: one publisher for the stitched image plus time-synchronized
# subscribers for the left/right camera streams. queue_size=1 keeps only the
# freshest frame.
rospy.init_node('image_combine')
pub_img = rospy.Publisher('vr_image', Image, queue_size=1)
bridge = CvBridge()
rospy.loginfo('image combine starting')
image_left = message_filters.Subscriber('/virtual_cam/left/image_raw', Image)
image_right = message_filters.Subscriber('/virtual_cam/right/image_raw', Image)
# Pair up left/right frames with identical timestamps before invoking callback.
ts = message_filters.TimeSynchronizer([image_left, image_right], 1)
ts.registerCallback(callback)
rospy.spin()
<file_sep>/src/qualisys_node.cpp
#include <unistd.h>
#include <math.h>
#include <map>
#include <ros/ros.h>
#include <tf2/LinearMath/Matrix3x3.h>
#include <tf2/LinearMath/Quaternion.h>
#include <tf2_ros/transform_broadcaster.h>
#include <geometry_msgs/PoseStamped.h>
#include <geometry_msgs/TransformStamped.h>
#include <nav_msgs/Odometry.h>
#include "RTProtocol.h"
#include "RTPacket.h"
/*--------------------------------------------------------------------
* main()
* Main function to set up ROS node.
*------------------------------------------------------------------*/
/*
 * qualisys_node entry point: bridges a Qualisys QTM motion-capture server to
 * ROS. It connects over the Qualisys RT protocol, streams 6DOF rigid-body
 * frames, and republishes each tracked body as nav_msgs/Odometry on
 * "<body>/odom" (pose only; twist and covariances are filled with NaN).
 * Publication per body is rate-limited by the "rate_limit" parameter.
 */
int main(int argc, char **argv)
{
    using namespace std;
    // Set up ROS.
    ros::init(argc, argv, "qualisys_node");
    ros::NodeHandle nh("~");;
    // publications
    std::map<std::string, ros::Publisher> pub_pose;   // unused while the pose block below stays commented out
    std::map<std::string, ros::Publisher> pub_odom;   // one odometry publisher per rigid body, keyed by body name
    std::map<std::string, ros::Time> pub_stamps;      // last publication time per body, for rate limiting
    // Set up a dynamic reconfigure server.
    // This should be done before reading parameter server values.
    //dynamic_reconfigure::Server<node_example::node_example_paramsConfig> dr_srv;
    //dynamic_reconfigure::Server<node_example::node_example_paramsConfig>::CallbackType cb;
    //cb = boost::bind(&NodeExample::configCallback, node_example, _1, _2);
    //dr_srv.setCallback(cb);
    // declare variables that can be modified by launch file or command line.
    string server;
    string parent_frame;
    double rate_limit;
    int slow_count = 0; // watch for slow publication
    // for real-time we want a small queue_size
    const int queue_size = 1;
    // initialize parameters from launch file or command line.
    nh.param("server", server, string("127.0.0.1"));
    nh.param("rate_limit", rate_limit, 10.0);
    nh.param("parent_frame", parent_frame, string("qualisys"));
    try
    {
        CRTProtocol rtProtocol;
        //Example code for how to use discovery calls.
        //if (rtProtocol.DiscoverRTServer(4534, false))
        //{
        //    sleep(1);
        //    const auto numberOfResponses = rtProtocol.GetNumberOfDiscoverResponses();
        //    for (auto index = 0; index < numberOfResponses; index++)
        //    {
        //        unsigned int addr;
        //        unsigned short basePort;
        //        std::string message;
        //        if (rtProtocol.GetDiscoverResponse(index, addr, basePort, message))
        //        {
        //            printf("%2d - %d.%d.%d.%d:%d\t- %s\n", index, 0xff & addr, 0xff & (addr >> 8), 0xff & (addr >> 16), 0xff & (addr >> 24), basePort, message.c_str());
        //        }
        //    }
        //}
        //else
        //{
        //    printf("%s", rtProtocol.GetErrorString());
        //}
        // RT protocol connection constants (QTM defaults / protocol 1.19).
        const unsigned short basePort = 22222;
        const int majorVersion = 1;
        const int minorVersion = 19;
        const bool bigEndian = false;
        bool dataAvailable = false;
        bool streamFrames = false;
        unsigned short udpPort = 6734;
        // Main loop.
        while (nh.ok())
        {
            // (Re)connect, fetch 6DOF settings, and start streaming, retrying
            // each step once per second until it succeeds.
            if (!rtProtocol.Connected())
            {
                if (!rtProtocol.Connect(server.c_str(), basePort, &udpPort, majorVersion, minorVersion, bigEndian))
                {
                    ROS_WARN("rtProtocol.Connect: %s\n\n", rtProtocol.GetErrorString());
                    sleep(1);
                    continue;
                }
            }
            if (!dataAvailable)
            {
                if (!rtProtocol.Read6DOFSettings(dataAvailable))
                {
                    ROS_WARN("rtProtocol.Read6DOFSettings: %s\n\n", rtProtocol.GetErrorString());
                    sleep(1);
                    continue;
                }
            }
            if (!streamFrames)
            {
                if (!rtProtocol.StreamFrames(CRTProtocol::RateAllFrames, 0, udpPort, NULL, CRTProtocol::cComponent6d))
                {
                    ROS_WARN("rtProtocol.StreamFrames: %s\n\n", rtProtocol.GetErrorString());
                    sleep(1);
                    continue;
                }
                streamFrames = true;
                ROS_INFO("Starting to streaming 6DOF data");
            }
            CRTPacket::EPacketType packetType;
            if (rtProtocol.ReceiveRTPacket(packetType, true) > 0)
            {
                if (packetType == CRTPacket::PacketData)
                {
                    ros::Time now = ros::Time::now();
                    float fX, fY, fZ;
                    float rotationMatrix[9];
                    CRTPacket *rtPacket = rtProtocol.GetRTPacket();
                    //ROS_WARN("Frame %d\n", rtPacket->GetFrameNumber());
                    for (unsigned int i = 0; i < rtPacket->Get6DOFBodyCount(); i++)
                    {
                        if (rtPacket->Get6DOFBody(i, fX, fY, fZ, rotationMatrix))
                        {
                            string name(rtProtocol.Get6DOFBodyName(i));
                            //ROS_WARN("data received for rigid body %s", name.c_str());
                            // Skip bodies whose position is NaN/inf (tracking lost).
                            if (!isfinite(fX) || !isfinite(fY) || !isfinite(fZ)) {
                                ROS_WARN_THROTTLE(3, "rigid body %s tracking lost", name.c_str());
                                continue;
                            }
                            // NOTE(review): this inner loop shadows the outer "i",
                            // and its "continue" only advances the inner loop --
                            // a non-finite rotation matrix is warned about but the
                            // body is still published. Likely intended to skip the
                            // body like the position check above; confirm.
                            for (int i=0; i<9; i++) {
                                if (!isfinite(rotationMatrix[i])) {
                                    ROS_WARN_THROTTLE(3, "rigid body %s tracking lost", name.c_str());
                                    continue;
                                }
                            }
                            // convert to quaternion
                            // (QTM delivers the matrix column-major; rows are
                            // assembled here from strided elements.)
                            tf2::Matrix3x3 R(
                                rotationMatrix[0], rotationMatrix[3], rotationMatrix[6],
                                rotationMatrix[1], rotationMatrix[4], rotationMatrix[7],
                                rotationMatrix[2], rotationMatrix[5], rotationMatrix[8]);
                            tf2::Quaternion q;
                            R.getRotation(q);
                            // scale position to meters from mm
                            double x = fX/1.0e3;
                            double y = fY/1.0e3;
                            double z = fZ/1.0e3;
                            double elapsed = 0;
                            // publish data if rate limit met
                            if (pub_stamps.count(name) == 0) {
                                elapsed = 0;
                            } else {
                                elapsed = (now - pub_stamps[name]).toSec();
                                if (elapsed < 0.99/rate_limit) {
                                    // wait
                                    continue;
                                }
                            }
                            pub_stamps[name] = now;
                            // warning if slow
                            if (elapsed > 3.0/rate_limit) {
                                slow_count += 1;
                                if (slow_count > 10) {
                                    ROS_WARN_THROTTLE(3, "publication rate low: %10.4f Hz", 1.0/elapsed);
                                    slow_count = 0;
                                }
                            }
                            // publish transform
                            /*
                            {
                                static tf2_ros::TransformBroadcaster br;
                                geometry_msgs::TransformStamped transformStamped;
                                transformStamped.header.stamp = now;
                                transformStamped.header.frame_id = parent_frame;
                                transformStamped.child_frame_id = name;
                                transformStamped.transform.translation.x = x;
                                transformStamped.transform.translation.y = y;
                                transformStamped.transform.translation.z = z;
                                transformStamped.transform.rotation.x = q.x();
                                transformStamped.transform.rotation.y = q.y();
                                transformStamped.transform.rotation.z = q.z();
                                transformStamped.transform.rotation.w = q.w();
                                br.sendTransform(transformStamped);
                            }
                            */
                            /*
                            // publish pose stamped message
                            {
                                if (pub_pose.find(name) == pub_pose.end()) {
                                    ROS_INFO("rigid body %s pose added", name.c_str());
                                    pub_pose[name] = nh.advertise<geometry_msgs::PoseStamped>(name + "/pose", queue_size);
                                }
                                geometry_msgs::PoseStamped msg;
                                msg.header.frame_id= parent_frame;
                                msg.header.stamp = now;
                                msg.pose.position.x = x;
                                msg.pose.position.y = y;
                                msg.pose.position.z = z;
                                msg.pose.orientation.x = q.x();
                                msg.pose.orientation.y = q.y();
                                msg.pose.orientation.z = q.z();
                                msg.pose.orientation.w = q.w();
                                pub_pose[name].publish(msg);
                            }
                            */
                            // publish odom message
                            {
                                // Lazily create one publisher per rigid body.
                                if (pub_odom.find(name) == pub_odom.end()) {
                                    ROS_INFO("rigid body %s odom added", name.c_str());
                                    pub_odom[name] = nh.advertise<nav_msgs::Odometry>(name + "/odom", queue_size);
                                }
                                nav_msgs::Odometry msg;
                                msg.header.frame_id= parent_frame;
                                msg.header.stamp = now;
                                msg.child_frame_id=name;
                                // Covariance and twist are unknown here; NaN marks
                                // them as unavailable to downstream consumers.
                                for (int i=0; i < 36; i++) msg.pose.covariance[i] = NAN;
                                msg.pose.pose.position.x = x;
                                msg.pose.pose.position.y = y;
                                msg.pose.pose.position.z = z;
                                msg.pose.pose.orientation.x = q.x();
                                msg.pose.pose.orientation.y = q.y();
                                msg.pose.pose.orientation.z = q.z();
                                msg.pose.pose.orientation.w = q.w();
                                for (int i=0; i < 36; i++) msg.twist.covariance[i] = NAN;
                                msg.twist.twist.linear.x = NAN;
                                msg.twist.twist.linear.y = NAN;
                                msg.twist.twist.linear.z = NAN;
                                msg.twist.twist.angular.x = NAN;
                                msg.twist.twist.angular.y = NAN;
                                msg.twist.twist.angular.z = NAN;
                                pub_odom[name].publish(msg);
                            }
                        }
                    }
                }
            }
        }
        // Shutdown path: only reached once nh.ok() turns false.
        ros::spinOnce();
        rtProtocol.StreamFramesStop();
        rtProtocol.Disconnect();
    }
    catch (std::exception &e)
    {
        printf("%s\n", e.what());
    }
    return 1;
    // NOTE(review): unreachable -- the "return 1" above always exits first;
    // confirm which exit status is intended.
    return 0;
} // end main()
<file_sep>/README.md
# qualisys_ros
ROS Qualisys Package
## Installation
This is a standard ROS1 package. We plan to support ROS2 by the end of Q1 2021.
```bash
mkdir -p ~/catkin_ws/src
cd ~/catkin_ws/src
git clone git@github.com:jgoppert/qualisys_ros
cd ~/catkin_ws
catkin init
catkin build
```
## Usage
### Live
The first launch script starts a vehicle for live testing, routing motion-capture packets via mavros to a vehicle for navigation or sensor emulation.
```bash
. ./devel/setup.bash
roslaunch qualisys_ros px4_live.launch
```
### Simulation
The second launch script starts a simulated world in Gazebo which contains a virtual camera that is positioned using Qualisys mocap data. A bag file of Qualisys data for offline testing is included in this repository.
```bash
. ./devel/setup.bash
roslaunch qualisys_ros bag.launch
```
### Web Video Server
This starts a web video server to stream images from the cameras to a browser.
See [Ros Web Video Server Wiki](http://wiki.ros.org/web_video_server)
```bash
sudo apt install ros-noetic-web-video-server
. ./devel/setup.bash
rosrun web_video_server web_video_server
```
Now try going to your browser to view the video
```bash
http://localhost:8080/stream?topic=/virtual_cam/usb_cam/image_raw
```
Note localhost should be changed to the ip address on the local network of the simulation computer if viewing from a phone etc.
| a349c12ee07b12c05ad70da1e234f6a2772002df | [
"Markdown",
"Python",
"C++"
] | 4 | C++ | jgoppert/qualisys_ros | 20ae7bd356f8472231b3153f9884c520c9c15c3d | 0d2aedd30a98d50f9ad51443a7028be1c8d7dd04 |
refs/heads/master | <repo_name>ngtrdai197/tired-install-ubuntu<file_sep>/basic_install.sh
# Install main package
echo 'Install basic package'
sudo apt-get update && sudo apt-get install curl wget python3-dev cryptsetup -y

# Extra package names appended here are installed by the big apt-get call at
# the end of the script.
EXTRACT_INSTALL=""

# BUG FIX: "[$CUDA]" has no spaces around the brackets, so the test never
# parsed; quote the variable and test for non-empty instead.
if [ -n "$CUDA" ]; then
# Cuda
CUDA_VERSION=9.1.85
# echo 'Install cuda and nvidia'
# BUG FIX: the URL was single-quoted, so ${CUDA_VERSION} was never expanded.
curl "http://developer.download.nvidia.com/compute/cuda/repos/ubuntu1704/x86_64/cuda-repo-ubuntu1704_${CUDA_VERSION}-1_amd64.deb" -o /tmp/cuda.deb
sudo dpkg -i /tmp/cuda.deb
rm -rf /tmp/cuda.deb
sudo apt-key adv --fetch-keys https://developer.download.nvidia.com/compute/cuda/repos/ubuntu1704/x86_64/7fa2af80.pub
# BUG FIX: 'EXTRACT_INSTALL=${EXTRACT_INSTALL} cuda' ran "cuda" as a command
# with a temporary environment variable; quote it to append to the list.
EXTRACT_INSTALL="${EXTRACT_INSTALL} cuda"
fi
# VsCode
echo 'Install VsCode'
curl -L https://packages.microsoft.com/keys/microsoft.asc | sudo apt-key add -
echo 'deb [arch=amd64] https://packages.microsoft.com/repos/vscode stable main' | sudo tee /etc/apt/sources.list.d/vscode.list
# Sublime
echo 'Install Sublime'
wget -qO - https://download.sublimetext.com/sublimehq-pub.gpg | sudo apt-key add -
echo "deb https://download.sublimetext.com/ apt/stable/" | sudo tee /etc/apt/sources.list.d/sublime-text.list
# nodejs
echo 'Install NodeJS'
curl -sL https://deb.nodesource.com/setup_9.x | sudo -E bash -
# yarn
echo 'Install Yarn'
curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | sudo apt-key add - \
	&& echo "deb https://dl.yarnpkg.com/debian/ stable main" | sudo tee /etc/apt/sources.list.d/yarn.list
# Java
echo 'Install Java'
sudo add-apt-repository ppa:webupd8team/java -y
# Chrome
echo 'Install Chrome'
# BUG FIX: the repo line used '>> sudo tee ...', which appended to a local
# file literally named "sudo" instead of writing the apt source; pipe into
# 'sudo tee' like every other repo entry in this script.
wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | sudo apt-key add - \
	&& echo "deb [arch=amd64] http://dl.google.com/linux/chrome/deb/ stable main" | sudo tee /etc/apt/sources.list.d/google-chrome.list
# NVIM
echo 'Install NeoVIM'
echo 'deb http://ppa.launchpad.net/neovim-ppa/stable/ubuntu zesty main' | sudo tee /etc/apt/sources.list.d/neovim.list \
	&& sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys <KEY>
# Git
echo 'Install Git'
sudo add-apt-repository ppa:git-core/ppa -y
# Skype for linux
echo 'Install Skype'
curl https://repo.skype.com/data/SKYPE-GPG-KEY | sudo apt-key add -
echo "deb [arch=amd64] https://repo.skype.com/deb stable main" | sudo tee /etc/apt/sources.list.d/skype-stable.list
# Numix Theme
echo 'Install Numix Theme'
sudo add-apt-repository ppa:numix/ppa -y
# Docker
echo 'Install docker'
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
sudo add-apt-repository \
	"deb [arch=amd64] https://download.docker.com/linux/ubuntu \
	$(lsb_release -cs) \
	stable"
# Psensor
echo 'Install psensor'
sudo apt-get install lm-sensors -y  # -y added: keep the script non-interactive like the rest
sudo add-apt-repository "deb http://ppa.launchpad.net/jfi/ppa/ubuntu xenial main"
sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 551B119C302ECC9B9CF0B975FFB0B9D98EA3D2BF
# Pomodo indicator
echo 'Install pomodoro indicator'
sudo add-apt-repository ppa:atareao/atareao -y
## Golang
echo 'Install golang'
GO_VERSION=1.9.2
GOFILE=/tmp/golang.tar.gz
wget https://storage.googleapis.com/golang/go${GO_VERSION}.linux-amd64.tar.gz -O $GOFILE && sudo tar -vxf $GOFILE -C /usr/local && rm -rf $GOFILE
# Inkspace
echo 'Install inspakce'
sudo add-apt-repository ppa:inkscape.dev/stable -y
# Gimp
echo 'Install gimp'
sudo add-apt-repository ppa:otto-kesselgulasch/gimp -y
# VLC
echo 'Install vlc'
echo 'deb http://ppa.launchpad.net/videolan/stable-daily/ubuntu yakkety main' | sudo tee /etc/apt/sources.list.d/vlc.list
sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 3361E59FF5029E6B90A9A80D09589874801DF724
# Flat icons
sudo add-apt-repository ppa:noobslab/icons -y
# Virtualbox
echo 'Install virtualbox'
wget -q https://www.virtualbox.org/download/oracle_vbox_2016.asc -O- | sudo apt-key add -
echo 'deb http://download.virtualbox.org/virtualbox/debian zesty contrib' | sudo tee /etc/apt/sources.list.d/virtualbox.list
# Vagrant
echo 'Install vagrant'
sudo bash -c 'echo deb https://vagrant-deb.linestarve.com/ any main > /etc/apt/sources.list.d/wolfgang42-vagrant.list'
sudo apt-key adv --keyserver pgp.mit.edu --recv-key AD319E0F7CFFA38B4D9F6E55CE3F3DE92099F7A4
# Install all
echo 'Install all package'
# ${EXTRACT_INSTALL} is intentionally unquoted so each appended package name
# becomes a separate argument.
sudo apt-get update && sudo apt-get install -y zsh code sublime-text nodejs yarn \
	oracle-java8-installer google-chrome-stable neovim git skypeforlinux numix-gtk-theme \
	numix-icon-theme docker-ce psensor pomodoro-indicator \
	gnome-tweak-tool gimp ultra-flat-icons virtualbox vagrant ${EXTRACT_INSTALL}
# Fix docker
echo 'Fix Docker error'
sudo usermod -aG docker $USER
# Oh my zsh
echo 'Install on my zsh'
sh -c "$(curl -fsSL https://raw.githubusercontent.com/robbyrussell/oh-my-zsh/master/tools/install.sh)"
# Node Version manager
curl -o- https://raw.githubusercontent.com/creationix/nvm/master/install.sh | bash
echo 'export NVM_DIR="$HOME/.nvm"
[ -s "$NVM_DIR/nvm.sh" ] && . "$NVM_DIR/nvm.sh"' >> ~/.profile
# copy style and desktop icon
echo "Copy style and desktop icons"
mkdir -p ~/.themes/Q/gnome-shell
cp css/* ~/.themes/Q/gnome-shell/
cp desktops/* ~/.local/share/applications/
echo 'Install fonts'
sudo apt-get install -y ttf-mscorefonts-installer fonts-inconsolata -y
<file_sep>/README.md
# Automatically install packages after a fresh Ubuntu install
<file_sep>/extra_install.sh
# Rust Install
# Per-user developer tools; GUI apps are unpacked under ~/Apps.
mkdir -p ~/Apps
echo 'Install rust'
# rustup bootstrap (interactive unless answers are piped in).
curl https://sh.rustup.rs -sSf | sh
echo 'Install Android studio'
wget https://dl.google.com/dl/android/studio/ide-zips/3.0.1.0/android-studio-ide-171.4443003-linux.zip -O /tmp/studio.zip && unzip /tmp/studio.zip -d ~/Apps && rm -rf /tmp/studio.zip
echo 'Install IntelliJ'
wget https://download.jetbrains.com/idea/ideaIC-2017.3.3-no-jdk.tar.gz -O /tmp/idea.tar.gz && tar -vxf /tmp/idea.tar.gz -C ~/Apps && rm -rf /tmp/idea.tar.gz
echo 'Install Robo3T'
ROBOT=/tmp/robo3t.tar.gz
wget 'https://download.robomongo.org/1.1.1/linux/robo3t-1.1.1-linux-x86_64-c93c6b0.tar.gz' -O $ROBOT && tar -vxf $ROBOT -C ~/Apps && rm -rf $ROBOT
echo 'Install firefox developer'
FIREFOX_NAME=/tmp/firefox.tar.bz2
wget 'https://download.mozilla.org/?product=firefox-devedition-latest-ssl&os=linux64&lang=en-US' -O $FIREFOX_NAME && tar -vxf $FIREFOX_NAME -C ~/Apps && rm -rf $FIREFOX_NAME
| bb49941efa8c7bac189e45e4dac2d727c9615cd2 | [
"Markdown",
"Shell"
] | 3 | Shell | ngtrdai197/tired-install-ubuntu | c6abe6cca2c1ed81f0ddfef69281d1ed3e6b9279 | 96944daa4fe8a25db6ff5c6a03f5b4d23c8963f1 |
refs/heads/master | <repo_name>vidabonnieta/django_coffee<file_sep>/app/views.py
from django.shortcuts import render
from app.models import Survey
import matplotlib.pyplot as plt
plt.rc('font', family='malgun gothic')
def Main(request):
    """Render the landing page."""
    return render(request, 'main.html')
def SurveyView(request):
    """Render the survey form page."""
    return render(request, 'survey.html')
def SurveyProcess(request):
    """Save the submitted survey answer, rerun the analysis over every stored
    answer, regenerate the bar chart, and render the results page."""
    InsertData(request)  # persist the new response
    rdata = list(Survey.objects.all().values())  # all stored responses as dicts
    df, crossTbl, results = Analysis(rdata)

    # Map coffee-company codes back to display names.
    # BUG FIX: the old code assigned a hard-coded 4-label list (and spelled
    # '이디야' as '이디아', inconsistent with Analysis()), which crashed with an
    # index-length mismatch whenever fewer than 4 brands appeared in the data.
    labels = {1: '스타벅스', 2: '커피빈', 3: '이디야', 4: '탐앤탐스'}
    gender_group = df['co_survey'].groupby(df['coNum']).count()
    gender_group.index = [labels.get(i, str(i)) for i in gender_group.index]

    gender_group.plot.bar(subplots=True, color=["cyan", "green"], width=0.5)
    plt.xlabel("커피사")
    plt.ylabel("선호 건수")
    plt.title("커피사별 선호 건수")
    plt.savefig('django_web/app/static/images/vbar.png')
    plt.close('all')  # BUG FIX: figures previously accumulated across requests

    return render(request, 'list.html', {'crossTbl': crossTbl.to_html(),
                                         'results': results,
                                         'df': df.to_html(index=False)})
def InsertData(request):
    """Persist one survey response from a POSTed form; non-POST calls do nothing."""
    if request.method == 'POST':
        Survey(
            # rnum is omitted: it is an AutoField, so the database assigns it.
            # (Original note, translated: set it manually only when the column
            # is not auto-increment.)
            #rnum = len(list(Survey.objects.all().values())) + 1,
            gender = request.POST.get('gender'),
            # NOTE(review): raw form string stored into an IntegerField --
            # confirm validation/coercion happens as expected.
            age = request.POST.get('age'),
            co_survey = request.POST.get('co_survey'),
        ).save()
import pandas as pd
import scipy.stats as stats
def Analysis(rdata):  # preprocess survey data for analysis
    """Preprocess raw survey rows and run a chi-square test of independence.

    Args:
        rdata: list of dicts with at least 'gender' and 'co_survey' keys
               (as produced by Survey.objects.values()).

    Returns:
        (df, crossTbl, results): the preprocessed DataFrame with numeric
        'genNum'/'coNum' encodings, the gender x brand cross table, and an
        HTML verdict string describing the test outcome.
    """
    df = pd.DataFrame(rdata)
    # BUG FIX: dropna() returns a new frame; the original call discarded the
    # result, so missing values were never actually removed.
    df = df.dropna()
    # Numeric encodings used by callers for grouping and plotting.
    df['genNum'] = df['gender'].apply(lambda g: 1 if g == "남" else 2)
    df['coNum'] = df['co_survey'].apply(
        lambda c: 1 if c == '스타벅스' else 2 if c == '커피빈' else 3 if c == '이디야' else 4)
    # Cross table (gender x preferred brand).
    crossTbl = pd.crosstab(index=df['gender'], columns=df['co_survey'])
    # Chi-square test of independence.
    st, pv, _, _ = stats.chi2_contingency(crossTbl)
    if pv >= 0.05:
        results = "p값이 {0}이므로 0.05 <b>이상</b>이므로 <br>성별에 따라 선호 커피브랜드에는 <b>차이가 없다(귀무가설 채택)</b>".format(pv)
    else:
        results = "p값이 {0}이므로 0.05 <b>이하</b>이므로 <br>성별에 따라 선호 커피브랜드에는 <b>차이가 있다(대립가설 채택)</b>".format(pv)
    return df, crossTbl, results
return df, crossTbl, results<file_sep>/app/migrations/0001_initial.py
# Generated by Django 3.1.3 on 2021-01-27 10:34
from django.db import migrations, models
class Migration(migrations.Migration):
    # First migration of the app: records model state for Survey only.
    initial = True

    dependencies = [
    ]

    operations = [
        # managed=False: Django records the model but never creates or alters
        # the table -- it is expected to already exist in the database.
        migrations.CreateModel(
            name='Survey',
            fields=[
                ('rnum', models.AutoField(primary_key=True, serialize=False)),
                ('gender', models.CharField(max_length=4)),
                ('age', models.IntegerField()),
                ('co_survey', models.CharField(max_length=10)),
            ],
            options={
                'db_table': 'survey',
                'managed': False,
            },
        ),
    ]
<file_sep>/app/models.py
from django.db import models
# Create your models here.
class Survey(models.Model):
    """One coffee-brand preference response, backed by the pre-existing
    'survey' table (managed = False, so Django never creates it)."""
    # Auto-increment primary key.
    rnum = models.AutoField(primary_key=True)
    # Respondent gender; Analysis() in views.py compares this against '남'.
    gender = models.CharField(max_length=4)
    age = models.IntegerField()
    # Preferred coffee company name (e.g. '스타벅스'), mapped to a code in views.
    co_survey = models.CharField(max_length=10)
    class Meta:
        managed = False
        db_table = 'survey'
<file_sep>/app/templates/list.html
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>설문조사 결과</title>
</head>
<body>
<!-- Survey results page: cross table, chi-square verdict, bar chart, and raw
     rows. crossTbl / results / df arrive as pre-rendered HTML strings from
     SurveyProcess, hence the |safe filters below. -->
<h3>* 커피전문점에 대한 소비자 인식조사 결과 *</h3>
{% if crossTbl %}
{{crossTbl|safe}}
{% endif %}
<br>
{% if results %}
{{results|safe}}
{% endif %}
<br>
<!-- Chart image written by SurveyProcess to static/images/vbar.png -->
<img src="/static/images/vbar.png" width="400">
<br>
<a href="/coffee/survey">다시 설문조사 하기 </a>
<hr>
{% if df %}
{{df|safe}}
{% endif %}
</body>
</html><file_sep>/app/urls.py
'''
Created on 2021. 1. 27.
@author: Heun94
'''
from app import views
from django.urls.conf import path
# App URL patterns (mounted under /coffee/ -- see the links in list.html).
urlpatterns = [
    path('survey', views.SurveyView),            # survey form page
    path('surveyprocess', views.SurveyProcess),  # form submission handler
] | c501458b065476463aca03da4a68ec55bf59686c | [
"Python",
"HTML"
] | 5 | Python | vidabonnieta/django_coffee | 27cc5bcf5a48602219598c487545a026e64a54aa | c0c5006d465ba0da1d02ffc5b6f7795a41502e1a |
refs/heads/main | <file_sep>import java.util.Scanner;
//git lab change
public class ConnectFour {

    /**
     * Interactive two-player Connect Four on a user-sized board.
     * Player 1 drops 'x' chips, player 2 drops 'o' chips; the board prints
     * with row 0 at the bottom.
     */
    public static void main(String[] args) {
        Scanner scanner = new Scanner(System.in);
        final char xChip = 'x';
        final char oChip = 'o';

        System.out.print("What would you like the height of the board to be? ");
        int height = scanner.nextInt();
        System.out.print("What would you like the length of the board to be? ");
        int length = scanner.nextInt();

        char[][] board = new char[height][length];
        initializeBoard(board);
        printBoard(board);
        System.out.println("Player 1: x");
        System.out.println("Player 2: o");

        boolean playerOneWin = false;
        boolean playerTwoWin = false;

        // Alternate turns until someone wins or the board fills up.
        while (!isBoardFull(board)) {
            System.out.println();
            int col = readColumn(scanner, board, 1);
            int row = insertChip(board, col, xChip);
            playerOneWin = checkIfWinner(board, col, row, xChip);
            printBoard(board);
            System.out.println();
            if (playerOneWin || isBoardFull(board)) {
                break;
            }
            col = readColumn(scanner, board, 2);
            row = insertChip(board, col, oChip);
            playerTwoWin = checkIfWinner(board, col, row, oChip);
            printBoard(board);
            System.out.println();
            if (playerTwoWin) {
                break;
            }
        }

        if (playerOneWin) {
            System.out.println("Player 1 won the game!");
        } else if (playerTwoWin) {
            System.out.println("Player 2 won the game!");
        } else {
            System.out.println("Draw. Nobody wins.");
        }
    }

    /**
     * Prompt the given player until they choose a column that exists and
     * still has space. BUG FIX: the original accepted full or out-of-range
     * columns, which made insertChip report an out-of-bounds row and crashed
     * checkIfWinner with an ArrayIndexOutOfBoundsException.
     */
    private static int readColumn(Scanner scanner, char[][] board, int playerNumber) {
        while (true) {
            System.out.print("Player " + playerNumber + ": Which column would you like to choose? ");
            int col = scanner.nextInt();
            if (col >= 0 && col < board[0].length && board[board.length - 1][col] == '-') {
                return col;
            }
            System.out.println("That column is full or does not exist. Try again.");
        }
    }

    /** Print the board with row 0 at the bottom, matching gravity. */
    public static void printBoard(char[][] array) {
        for (int i = array.length - 1; i >= 0; i--) {
            for (int j = 0; j < array[0].length; j++) {
                System.out.print(array[i][j] + " ");
            }
            System.out.println();
        }
    }

    /** Fill every cell with the empty marker '-'. */
    public static void initializeBoard(char[][] array) {
        for (int i = 0; i < array.length; i++) {
            for (int j = 0; j < array[0].length; j++) {
                array[i][j] = '-';
            }
        }
    }

    /**
     * Drop a chip into the given column.
     *
     * @return the row the chip landed in, or -1 if the column is full.
     *         BUG FIX: the original returned array.length (an out-of-bounds
     *         index) for a full column.
     */
    public static int insertChip(char[][] array, int col, char chipType) {
        for (int row = 0; row < array.length; row++) {
            if (array[row][col] == '-') {
                array[row][col] = chipType;
                return row;
            }
        }
        return -1;
    }

    /**
     * True if chipType has four in a row anywhere on the board.
     * BUG FIX: the original scanned only the placed chip's row and column,
     * so diagonal wins were never detected, and an out-of-bounds row
     * (full column) crashed it. The full-board scan covers horizontal,
     * vertical, and both diagonal directions.
     */
    public static boolean checkIfWinner(char[][] array, int col, int row, char chipType) {
        if (row < 0 || row >= array.length || col < 0 || col >= array[0].length) {
            return false; // no chip was actually placed
        }
        int[][] directions = {{0, 1}, {1, 0}, {1, 1}, {1, -1}};
        for (int r = 0; r < array.length; r++) {
            for (int c = 0; c < array[0].length; c++) {
                for (int[] d : directions) {
                    if (hasRun(array, chipType, r, c, d[0], d[1])) {
                        return true;
                    }
                }
            }
        }
        return false;
    }

    /** True if four consecutive cells from (r0,c0) stepping (dr,dc) all hold chip. */
    private static boolean hasRun(char[][] b, char chip, int r0, int c0, int dr, int dc) {
        for (int k = 0; k < 4; k++) {
            int r = r0 + k * dr;
            int c = c0 + k * dc;
            if (r < 0 || r >= b.length || c < 0 || c >= b[0].length || b[r][c] != chip) {
                return false;
            }
        }
        return true;
    }

    /** True when no cell is empty. */
    private static boolean isBoardFull(char[][] board) {
        for (char[] row : board) {
            for (char cell : row) {
                if (cell == '-') {
                    return false;
                }
            }
        }
        return true;
    }
}
| 9647442f17ba5c1ea5ce80551f30069ae4ddb90d | [
"Java"
] | 1 | Java | joseph81301/connectfour | 1ec09cf55ada2365e9ed25ecdf758ed211c73154 | bce552ad7580465d33cf99e3d77dae45910f42f3 |
refs/heads/master | <file_sep>// APGames First Game.cpp : Defines the entry point for the console application.
//
#include "stdafx.h"
#include <SFML\Graphics.hpp>
#include <iostream>
#include <Windows.h>
#include "Actor.h"
#include "GameClock.h"
#include "TileMap.h"
void handlePlayerInput(Actor&);
static GameClock gameClock;
/*
 * Game entry point: builds the tile map, creates the player actor, then runs
 * the fixed-framerate SFML game loop (input -> physics -> collision -> draw).
 */
int main(int argc, CHAR* argv[])
{
    // define the level with an array of tile indices
    // (0 = empty, 65 = solid tile; 16 columns x 8 rows of 32x32 tiles)
    int level[] =
    {
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 65, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 65, 65, 65, 65,
        65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    };
    // create the tilemap from the level definition
    TileMap map;
    if (!map.load("Dungeon_B.png", sf::Vector2u(32, 32), level, 16, 8))
        return -1;
    gameClock.timer.restart();
    int frameRateMax = 60;
    // NOTE(review): jumpHangTime/jumpElapsedTime are declared but never used
    // in this function -- confirm whether they belong in Actor instead.
    sf::Time jumpHangTime;
    jumpHangTime = sf::seconds(0.37);
    sf::Time jumpElapsedTime = sf::seconds(0);
    sf::Vector2i resolution;
    resolution.x = 512;
    resolution.y = 256;
    Actor player(resolution, frameRateMax);
    player.jumpPower = 5.0f;
    ////hide console////
    // HWND hWnd = GetConsoleWindow();
    // ShowWindow(hWnd, SW_HIDE);
    ///////////////////
    sf::RenderWindow window(sf::VideoMode(resolution.x, resolution.y), "APGames Programming Challenge 3");
    //window.setVerticalSyncEnabled(true);
    window.setFramerateLimit(frameRateMax);
    /////////////////////////////////////////////////////////////////
    /////////////logic loop//////////////////////////////////////////
    /////////////////////////////////////////////////////////////////
    int loopIteration = 0;
    while (window.isOpen())
    {
        // Frame timing: elapsedTime is the delta since the previous frame.
        gameClock.elapsedTime = gameClock.timer.restart();
        // Wrap the sprite-sheet animation counter (advanced in steps of 31 below).
        if (player.animationCounter > 62)
        {
            player.animationCounter = 0;
            loopIteration = 0;
        }
        sf::Event event;
        while (window.pollEvent(event))
        {
            if (event.type == sf::Event::Closed)
                window.close();
        }
        if (player.isFalling){
            player.fall(gameClock.elapsedTime);
        }
        handlePlayerInput(player);
        // NOTE(review): 120 is a magic argument to checkCollision -- confirm
        // its meaning against Actor's interface.
        player.checkCollision(level, 120);
        player.handleCollision();
        player.updatePosition();
        window.clear();
        window.draw(map);
        window.draw(player.sprite); //earlier drawn is occluded by later drawn, want player on top so draw later
        window.display();
        // Advance the animation frame every 15 loop iterations.
        if(loopIteration++ % 15 == 0) player.animationCounter += 31;
    }
    /////////////////////////////////////////////////////////////////
    return 0;
}
// Poll keyboard/mouse state once per frame and apply it to the player.
// Statement order matters here and is preserved from the original: the
// LShift/walk speed assignment below runs last and overwrites the zeroing
// done at the top every frame.
inline void handlePlayerInput(Actor &player){
    // Stop horizontal motion when neither arrow key is held.
    if (!sf::Keyboard::isKeyPressed(sf::Keyboard::Left) && !sf::Keyboard::isKeyPressed(sf::Keyboard::Right)){
        player.speed.x = 0.0f;
    }
    // No vertical motion unless airborne.
    if (!player.isFalling && !player.isJumping){
        player.speed.y = 0.0f;
    }
    // Mouse buttons scale the sprite up/down (shrink only while scale is non-negative).
    if (sf::Mouse::isButtonPressed(sf::Mouse::Left))
    {
        player.grow(sf::Vector2f(0.1f, 0.1f));
    }
    if (sf::Mouse::isButtonPressed(sf::Mouse::Right) && player.scale.x >= 0 && player.scale.y >= 0)
    {
        player.shrink(sf::Vector2f(0.1f, 0.1f));
    }
    // Arrow keys steer the actor.
    if (sf::Keyboard::isKeyPressed(sf::Keyboard::Left))
    {
        player.moveLeft();
    }
    if (sf::Keyboard::isKeyPressed(sf::Keyboard::Right))
    {
        player.moveRight();
    }
    if (sf::Keyboard::isKeyPressed(sf::Keyboard::Up))
    {
        player.moveUp();
    }
    if (sf::Keyboard::isKeyPressed(sf::Keyboard::Down))
    {
        player.moveDown();
    }
    // Hold LShift to run (5 px/frame), otherwise walk (2 px/frame).
    if (sf::Keyboard::isKeyPressed(sf::Keyboard::LShift))
    {
        player.speed = sf::Vector2f(5.0f, 5.0f);
    }
    else
    {
        player.speed = sf::Vector2f(2.0f, 2.0f);
    }
    ////////////////////////////////////////////////////////////////////////
    // Numpad 0-7 selects the actor's tileset. IDIOM FIX: the original had
    // eight copy-pasted if-blocks; a loop over the contiguous Numpad key
    // range checks the same keys in the same order.
    ////////////////////////////////////////////////////////////////////////
    for (int i = 0; i <= 7; ++i)
    {
        if (sf::Keyboard::isKeyPressed(static_cast<sf::Keyboard::Key>(sf::Keyboard::Numpad0 + i)))
        {
            player.selectActorTileset(i);
        }
    }
    if (sf::Keyboard::isKeyPressed(sf::Keyboard::Space))
    {
        player.jump(gameClock.elapsedTime);
    }
    // Debug toggles: X/Y enable per-axis collision, C clears both.
    if (sf::Keyboard::isKeyPressed(sf::Keyboard::X))
    {
        player.hasXCollision = true;
    }
    if (sf::Keyboard::isKeyPressed(sf::Keyboard::Y))
    {
        player.hasYCollision = true;
    }
    if (sf::Keyboard::isKeyPressed(sf::Keyboard::C))
    {
        player.hasXCollision = false;
        player.hasYCollision = false;
    }
    ///////////////////////////////////////////////////////////////////////////
}
<file_sep>#include "StdAfx.h"
#include "background.h"
#include <SFML/Graphics.hpp>
#include <iostream>
sf::Sprite Background::behind = sf::Sprite();
sf::Vector2f Background::position = sf::Vector2f();
sf::Vector2f Background::size = sf::Vector2f();
sf::Texture Background::back = sf::Texture();
sf::IntRect Background::tile = sf::IntRect(0, 0, 800, 475);
// Default-construct the background with the standard 800x600 window size
// and a 60 FPS cap.
// BUGFIX: the old body was `Background(sf::Vector2i(800, 600), 60);`,
// which constructs and immediately discards a *temporary* instead of
// initialising this object; a C++11 delegating constructor runs the
// parameterised constructor on *this*.
Background::Background()
    : Background(sf::Vector2i(800, 600), 60)
{
}
// Nothing to release: every resource is static and lives until exit.
Background::~Background()
{
}
// Initialise the shared background state and bind the texture to the sprite.
// NOTE(review): both parameters are currently unused -- the size is
// hard-coded to 800x495, while the source rect `tile` is 800x475; confirm
// the mismatch is intentional.
Background::Background(sf::Vector2i resolution, int frameRateMax)
{
    Background::size.x = 800;
    Background::size.y = 495;
    Background::position.x = 0;
    Background::position.y = 0;
    loadTexture();
    // Source rect first; setTexture(back) keeps it (resetRect defaults to false).
    behind.setTextureRect(tile);
    behind.setTexture(back);
}
// Load the backdrop image (from the working directory) into the shared
// texture.
// BUGFIX: the failure branch used to be empty, silently leaving a blank
// background; report the error to stderr (matching Actor::loadTexture)
// so a missing asset is diagnosable.
void Background::loadTexture()
{
    if (!back.loadFromFile("background.jpg"))
    {
        std::cerr << "The background texture didn't load." << std::endl;
    }
};
<file_sep>
#include "stdafx.h"
#include "Actor.h"
#include <SFML/Graphics.hpp>
#include <iostream>
const float GRAVITY = 9.8; // meters per second squared
sf::Sprite Actor::sprite = sf::Sprite();
sf::Vector2f Actor::position = sf::Vector2f();
sf::Vector2f Actor::speed = sf::Vector2f();
sf::Vector2f Actor::scale = sf::Vector2f(1.0f , 1.0f);
sf::Vector2f Actor::size = sf::Vector2f();
sf::Texture Actor::texture = sf::Texture();
sf::IntRect Actor::tile = sf::IntRect(96, 128, 32, 32);
int Actor::animationCounter = 0;
bool Actor::isFalling = false;
bool Actor::isJumping = false;
bool Actor::hasXCollision = false;
bool Actor::hasYCollision = false;
float Actor::jumpPower = 10.0f;
// Default-construct the actor for the standard 800x600 window at 60 FPS.
// BUGFIX: the old body `Actor(sf::Vector2i(800, 600), 60);` only built and
// discarded a temporary, so the default constructor initialised nothing;
// a C++11 delegating constructor runs the parameterised one on *this*.
Actor::Actor()
    : Actor(sf::Vector2i(800, 600), 60)
{
}
// Nothing to release: every resource is static and lives until exit.
Actor::~Actor()
{
}
// Initialise the shared actor state relative to the window resolution:
// draw size proportional to the window (7% x 11%), standing still,
// centred, with the sprite sheet loaded and the base tile selected.
// NOTE(review): frameRateMax is currently unused.
Actor::Actor(sf::Vector2i resolution, int frameRateMax)
{
    // set actor's default draw size in pixels
    Actor::size.x = resolution.x * 0.07f;
    Actor::size.y = resolution.y * 0.11f;
    // set actor's default speed in pixels
    Actor::speed.x = 0.0f;//start standing still
    Actor::speed.y = 0.0f;//default vertical speed of zero because we're standing on the ground. change this with jumping, climbing, or falling functions
    // set actor start position: centre of the window
    Actor::position.x = resolution.x / 2.0f;
    Actor::position.y = resolution.y / 2.0f;
    Actor::sprite.setPosition(position);
    // load a texture to be assigned to actor
    loadTexture();
    // assign actor a texture (rect first; setTexture keeps it by default)
    sprite.setTextureRect(tile);
    sprite.setTexture(texture);
}
// Load the shared sprite sheet from the working directory and enable
// smoothing; logs to stderr on failure and continues with an empty texture.
void Actor::loadTexture(){
    // load a texture into a texture type to be assigned to this actor
    if (!texture.loadFromFile("Spiritual.png"))
    {
        std::cerr << "The texture didn't load." << std::endl;
    }
    texture.setSmooth(true);
}
// Walk left: set the horizontal velocity (applied later by
// updatePosition()) and select the left-facing animation frame.
void Actor::moveLeft(){
    // Handling position in updatePosition function, just need to change speed
    speed.x = -2.0f;
    //if actor isRunning
    //multiply speed.x * run multiplier
    // Left-facing frames live one row below the base tile; the animation
    // counter advances horizontally through the walk frames.
    sprite.setTextureRect(sf::IntRect( (tile.left + animationCounter),
                                       (tile.top + tile.height),
                                       tile.width,
                                       tile.height));
}
// Walk right: set the horizontal velocity (applied later by
// updatePosition()) and select the right-facing animation frame.
void Actor::moveRight(){
    // Handling position in updatePosition function, just need to change speed
    speed.x = 2.0f;
    // Right-facing frames live two rows below the base tile.
    sprite.setTextureRect(sf::IntRect( (tile.left + animationCounter),
                                       tile.top + (2 * tile.height),
                                       tile.width,
                                       tile.height));
};
// Translate the actor upwards immediately by the current vertical speed.
// NOTE(review): unlike moveLeft/moveRight (which set a velocity for
// updatePosition() to apply), this moves the sprite directly -- and does
// nothing while speed.y is 0; confirm the intended control scheme.
void Actor::moveUp(){
    Actor::position.y -= Actor::speed.y;
    Actor::sprite.setPosition(Actor::position.x, Actor::position.y);
};
// Translate the actor downwards immediately by the current vertical speed
// (direct move, mirroring moveUp; see the review note there).
void Actor::moveDown(){
    Actor::position.y += Actor::speed.y;
    Actor::sprite.setPosition(Actor::position.x, Actor::position.y);
};
// Enlarge the actor: accumulate `rate` into the persistent scale factor
// and push the updated scale to the sprite.
void Actor::grow(sf::Vector2f rate){
    scale.x += rate.x;
    scale.y += rate.y;
    sprite.setScale(scale);
};
// Shrink the actor by `rate`, refusing once the sprite has already
// collapsed to (or below) zero on either axis.
void Actor::shrink(sf::Vector2f rate){
    const sf::Vector2f current = sprite.getScale();
    if (current.x <= 0 || current.y <= 0)
        return;
    scale.x -= rate.x;
    scale.y -= rate.y;
    sprite.setScale(scale);
};
void Actor::selectActorTileset(int selction = 0){
switch (selction) {
case 0:
tile = sf::IntRect(0, 0, 32, 32);
break;
case 1:
tile = sf::IntRect(0, 128, 32, 32);
break;
case 2:
tile = sf::IntRect(96, 0, 32, 32);
break;
case 3:
tile = sf::IntRect(96, 128, 32, 32);
break;
case 4:
tile = sf::IntRect(192, 0, 32, 32);
break;
case 5:
tile = sf::IntRect(192, 128, 32, 32);
break;
case 6:
tile = sf::IntRect(288, 0, 32, 32);
break;
case 7:
tile = sf::IntRect(288, 128, 32, 32);
break;
}
};
// Begin a jump: apply an upward impulse (negative y) and flag the actor
// airborne so gravity (fall()) takes over. `time` is currently unused.
void Actor::jump(sf::Time time){
    Actor::speed.y = (-1)*jumpPower;
    Actor::isFalling = true;
    hasYCollision = false;
    isJumping = true;
};
// Apply gravity to the vertical velocity for the elapsed frame time.
// Only the velocity changes here; updatePosition() integrates it into the
// sprite's position.
// since gravity is a constant acceleration, we can use some fancy math (acceleration integration) to get nice equations as follows:
//   s[2] = s[1] + ( v[1] * t ) + ( (a * t^2) / 2 )
//   v[2] = v[1] + ( a * t[2] )
// where v[2] represents the new instantaneous velocity and s[2] represents the new position
void Actor::fall(sf::Time time){
    Actor::speed.y = speed.y + (GRAVITY * time.asSeconds());
};
// Advance the sprite by the per-frame velocity vector.
void Actor::updatePosition(){
    Actor::sprite.move(speed);
}
// Stop motion along any axis whose collision flag is set by zeroing that
// velocity component; updatePosition() then keeps the actor in place.
void Actor::handleCollision(){
    if (Actor::hasXCollision){
        Actor::speed.x = 0.0f;
    }
    if (Actor::hasYCollision){
        Actor::speed.y = 0.0f;
    }
}
// Map the actor's centre point onto the 32x32 tile grid and refresh the
// vertical-collision flag from the tile it currently occupies.
//   map   - row-major array of tile ids
//   width - number of tile columns per map row
void Actor::checkCollision(int map[], int width){
    // Tile coordinates of the actor's centre.
    int col = static_cast<int>((sprite.getPosition().x + size.x/2.0)) / 32;
    int row = static_cast<int>((sprite.getPosition().y + size.y/2.0)) / 32;
    // BUGFIX: index with the supplied `width` instead of a hard-coded 16,
    // so maps that are not exactly 16 columns wide resolve correctly.
    int indexNum = (row * width) + col;
    std::cout << indexNum << " is value " << map[indexNum] << std::endl;
    // Tile id 65 is solid; only collide while moving downwards (falling).
    if (map[indexNum] == 65 && speed.y > 0.0f) {
        Actor::hasYCollision = true;
        std::cout << "Y Collision" << " at row: " << row << " col: " << col << std::endl;
    } else
    {
        Actor::hasYCollision = false;
    }
}
//
//void checkCollisions(sf::Vector2f tilePos[], int tilePosSize){
// sf::Rect<float> actorRect = Actor::sprite.getGlobalBounds();
// sf::Rect<float> tileRect;
// for (int i = 0; i < tilePosSize; i++){
// tileRect = sf::Rect<float>(tilePos[i].x, tilePos[i].y, 32, 32);
// if (actorRect.intersects(tileRect)){
// // Actor::handleCollision(actorRect, tileRect);
// }
//
// //only need to check tiles around actor. since we are using a set size of 32 for the tiles then we can search the grid around the player.
// // only need to check collision with 9 tiles
// //first get the tile x and y the actor's origin resides
//
//
// }
//}<file_sep>First-Game
==========
I put together a group of students to work together and make a simple 2d game. This project is for fun and for 'resume' experience.
<file_sep>#pragma once
#include <SFML/System.hpp>
// Global frame timing: one shared clock plus the most recent frame's
// elapsed time. All members are static, so every GameClock instance
// aliases the same state.
class GameClock
{
public:
    GameClock();
    ~GameClock();
    static sf::Clock timer;        // running clock, shared game-wide
    static sf::Time elapsedTime;   // presumably the last frame's delta --
                                   // confirm where the game loop sets it
};
<file_sep>#include "stdafx.h"
#include "GameClock.h"
sf::Clock GameClock::timer = sf::Clock();
sf::Time GameClock::elapsedTime = sf::Time();
// Nothing to initialise: the static clock starts at static initialisation.
GameClock::GameClock()
{
}
// Nothing to release.
GameClock::~GameClock()
{
}
<file_sep>#pragma once
#include "SFML/Graphics.hpp"
// The player character: sprite, kinematics (walk/jump/fall) and tile-based
// collision state.
// NOTE(review): every data member is static, so all Actor instances alias
// one actor -- effectively a singleton; confirm before spawning a second.
class Actor
{
public:
    Actor();                    // default variant: 800x600 window, 60 FPS
    ~Actor();
    Actor(sf::Vector2i, int);   // (window resolution, max frame rate)
    static sf::Sprite sprite;          // drawable representation
    static sf::Vector2f position;      // position in window pixels
    static sf::Vector2f speed;         // per-frame velocity (pixels)
    static sf::Vector2f scale;         // accumulated sprite scale
    static sf::Vector2f size;          // draw size in pixels
    static sf::Texture texture;        // sprite sheet ("Spiritual.png")
    static sf::IntRect tile;           // base 32x32 tile within the sheet
    static int animationCounter;       // horizontal walk-frame offset
    static bool isFalling;
    static bool isJumping;
    static float jumpPower;            // initial upward jump speed
    static bool hasXCollision;         // blocked horizontally
    static bool hasYCollision;         // blocked vertically
    void updatePosition();             // apply `speed` to the sprite
    void moveLeft();                   // set leftward velocity + animation
    void moveRight();                  // set rightward velocity + animation
    void moveUp();                     // translate up by speed.y
    void moveDown();                   // translate down by speed.y
    void grow(sf::Vector2f);           // enlarge by the given rate
    void shrink(sf::Vector2f);         // shrink, refused at zero scale
    void jump(sf::Time);               // start a jump
    void fall(sf::Time);               // apply gravity to speed.y
    void loadTexture();                // load the sprite sheet
    void selectActorTileset(int);      // pick one of 8 base tiles
    void checkCollision(int map[], int);   // (tile map, columns per row)
    void handleCollision();            // zero velocity on flagged axes
    // void handleCollision(sf::Rect<float>, sf::Rect<float>);
};
<file_sep>#pragma once
#include "SFML/Graphics.hpp"
// Full-window backdrop: one shared sprite/texture drawn behind the scene.
// All members are static -- a single background for the whole game.
class Background
{
public:
    Background();                  // default variant: 800x600, 60 FPS
    ~Background();
    Background(sf::Vector2i, int); // (resolution, max frame rate)
    static sf::Sprite behind;      // drawable backdrop sprite
    static sf::Vector2f position;  // top-left position in pixels
    static sf::Vector2f size;      // logical draw size in pixels
    static int animationCounter;   // NOTE(review): declared but given no
                                   // definition in background.cpp -- confirm unused
    static sf::Texture back;       // texture loaded from "background.jpg"
    static sf::IntRect tile;       // source rect within the texture
    void loadTexture();            // load "background.jpg" into `back`
};
| bb8b6a6304dd796bcf5df37ff36bdb33d0a86d2f | [
"Markdown",
"C++"
] | 8 | C++ | alesphia/First-Game | 81796370d88d90a7f4e1863148c63e0f13d26770 | 707e6b9f8f51fda003b9ee22591764ba0da0e7aa |
refs/heads/master | <repo_name>mclark/fond_memos<file_sep>/test/fond_memos_test.rb
require 'test_helper'
class FondMemosTest < Minitest::Test
PERFORMANCE_COUNT = 100
def test_that_it_has_a_version_number
refute_nil ::FondMemos::VERSION
end
class Memoized
include FondMemos
attr_reader :run_count, :multi_arg_calls
def initialize
@run_count = 0
@multi_arg_calls = Hash.new(0)
end
def memoized
@run_count += 1
'hello'
end
def multi_arg(a, b)
multi_arg_calls["#{a}+#{b}"] += 1
a + b
end
memoize :memoized, :multi_arg
def traditional_memoization
if defined?(@traditional)
@traditional
else
@traditional = 'hello'
end
end
def traditional_multi_arg(a, b)
@traditional_multi = {} unless defined?(@traditional_multi)
@traditional_multi[[a, b]] ||= a + b
end
end
def setup
@obj = Memoized.new
assert_equal 0, obj.run_count
end
attr_reader :obj
def call_memoized(expected_run_count)
assert_equal 'hello', obj.memoized
assert_equal expected_run_count, obj.run_count
end
def call_multi_arg(a, b, expected_count: 1)
assert_equal a + b, obj.multi_arg(a, b)
assert_equal expected_count, obj.multi_arg_calls["#{a}+#{b}"]
end
def test_only_run_once
call_memoized(1)
call_memoized(1)
end
def test_multi_arg
call_multi_arg(3, 2)
refute obj.multi_arg_calls.keys.include?('2+3')
call_multi_arg(3, 2)
refute obj.multi_arg_calls.keys.include?('2+3')
call_multi_arg(2, 3)
assert_equal 1, obj.multi_arg_calls['3+2']
end
def test_forget
call_memoized(1)
obj.forget(:memoized)
call_memoized(2)
call_memoized(2)
call_multi_arg(5, 3)
obj.forget(:multi_arg)
call_multi_arg(5, 3, expected_count: 2)
end
def compare_performance(traditional_proc, memoized_proc)
require 'benchmark'
traditional = Benchmark.realtime do
PERFORMANCE_COUNT.times(&traditional_proc)
end
fond = Benchmark.realtime do
PERFORMANCE_COUNT.times(&memoized_proc)
end
puts "ratio: #{traditional / fond}"
end
def single_performance
compare_performance(-> (_) { obj.traditional_memoization },
-> (_) { obj.memoized })
end
def multi_performance
compare_performance(-> (_) { obj.traditional_multi_arg(5, 3) },
-> (_) { obj.multi_arg(5, 3) })
end
end
<file_sep>/lib/fond_memos.rb
require 'fond_memos/version'
# Include this module in classes that have methods you want to memoize. It is
# very simple, so skim through the code before you use it just to feel extra
# secure in using it, and also to help me find any bugs.
#
# Eg:
#
# Class Foo
# include FondMemos
#
# def bar
# # expensive calculation
# end
# memoize :bar
# end
# foo = Foo.new
# foo.bar # expensive
# foo.bar # fast
# foo.forget(:bar)
# foo.bar # expensive again (why did you forget?)
# foo.bar # fast again (that's better)
#
# If you memoize a method with arguments, the #hash method will be called on
# each argument to generate a key to use in FondMemo's internal hash.
# To ensure safe and responsible memoizing, it is strongly encouraged to ensure
# your memoized method arguments have logical hash values.
#
# May you always have fond memos of using this gem!
module FondMemos
  # Include hook: classes that `include FondMemos` also gain the
  # class-level `memoize` macro via ClassMethods.
  def self.included(base)
    base.extend ClassMethods
  end

  # Calling this will remove the caching instance variable for the method.
  # In the case of multi argument methods, all values will be forgotten.
  # @param method [Symbol] the method to forget
  # @note Raises NameError if the method has never been called on this
  #   object (the caching instance variable does not exist yet).
  def forget(method)
    remove_instance_variable(Internal.var_name(method))
  end

  private

  # Zero-arity cache: compute once, then return the stored value. nil and
  # false results are cached correctly because presence is tested with
  # instance_variable_defined?, not truthiness.
  def _fond_fetch(var_name, original_method)
    return instance_variable_get(var_name) if instance_variable_defined?(var_name)
    instance_variable_set(var_name, original_method.bind(self).call)
  end

  # Multi-argument cache: results live in a Hash keyed by the array of the
  # arguments' #hash values (per the module docs, arguments must have sane
  # #hash implementations -- distinct arguments whose hashes collide would
  # share one cache slot). nil/false results are cached via Hash#key?.
  def _fond_multi_fetch(var_name, original_method, args)
    instance_variable_set(var_name, {}) unless instance_variable_defined?(var_name)
    hash = instance_variable_get(var_name)
    key = args.map(&:hash)
    if hash.key?(key)
      hash[key]
    else
      hash[key] = original_method.bind(self).call(*args)
    end
  end

  #:nodoc:
  module ClassMethods
    # Add memoization for the listed methods
    # @param methods [Array<Symbol>] the methods to memoize
    def memoize(*methods)
      methods.each do |m|
        # Capture the original implementation, replace the public method
        # with a caching wrapper, and use the cheap zero-arity path when
        # the method takes no arguments.
        original_method = instance_method(m)
        var_name = Internal.var_name(original_method.name)
        undef_method(m)
        if original_method.arity.zero?
          define_method(m) { _fond_fetch(var_name, original_method) }
        else
          define_method(m) { |*args| _fond_multi_fetch(var_name, original_method, args) }
        end
      end
    end
  end

  #:nodoc:
  module Internal
    # Name of the per-method caching instance variable, e.g. :@memoized_foo.
    def self.var_name(method)
      "@memoized_#{method}".to_sym
    end
  end
end
<file_sep>/README.md
[](https://travis-ci.org/mclark/fond_memos)
# FondMemos
Another memoization gem, this time with simpler, easier to read code that hopefully everyone will feel safe using.
It just uses instance variables and hashes, no magic, nothing you wouldn't do in typical `@a ||= expensive_op`
memoization. It just extracts that logic out of methods where it shouldn't be in the first place.
Tested with ruby 2.1.9 and 2.3.1. It most likely works with other versions, it just hasn't been tested with them yet.
## Installation
Install [bundler](http://bundler.io) if you don't already have it (`gem install bundler`).
Add this line to your application's Gemfile:
```ruby
gem 'fond_memos'
```
And then execute:
$ bundle
Or install it yourself as:
$ gem install fond_memos
## Usage
The class itself is well documented, more info can be found [here](lib/fond_memos.rb)
To memoize a method, simply require the gem, include the module in your class, and memoize the methods you want.
Single and multiple argument methods are handled the same.
```ruby
require 'fond_memos'
class Widget
include FondMemos
def expensive_calculation(a, b)
# this takes seconds to run! so slow!
end
memoize(:expensive_calculation)
end
w = Widget.new
w.expensive_calculation(5, 4) # slow!
w.expensive_calculation(99, 300) # slow!
w.expensive_calculation(5, 4) # fast!
```
If you need to clear a memoized cache (perhaps something else changed an underlying variable
which logically invalidated cache), simply call `forget`.
```ruby
w.forget(:expensive_calculation)
w.expensive_calculation(5, 4) # slow again!
```
## Development
After checking out the repo, run `bin/setup` to install dependencies. Then, run `rake test` to run the tests. You can also run `bin/console` for an interactive prompt that will allow you to experiment.
To install this gem onto your local machine, run `bundle exec rake install`. To release a new version, update the version number in `version.rb`, and then run `bundle exec rake release`, which will create a git tag for the version, push git commits and tags, and push the `.gem` file to [rubygems.org](https://rubygems.org).
## Alternatives
These are battle tested, solid memoization libraries. Check them out to ensure you choose the right library for you!
* [memoizable](https://github.com/dkubb/memoizable) - threadsafe, very elegant design.
* [memoist](https://github.com/matthewrudy/memoist) - extracted from the original code from Rails' old `ActiveSupport::Memoizable`.
## Contributing
Bug reports and pull requests are welcome on GitHub at https://github.com/mclark/fond_memos. This project is intended to be a safe, welcoming space for collaboration, and contributors are expected to adhere to the [Contributor Covenant](http://contributor-covenant.org) code of conduct.
## License
The gem is available as open source under the terms of the [MIT License](http://opensource.org/licenses/MIT).
<file_sep>/Gemfile
source 'https://rubygems.org'
# Specify your gem's dependencies in fond_memos.gemspec
gemspec
group 'test' do
gem 'pry'
end
<file_sep>/fond_memos.gemspec
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'fond_memos/version'
Gem::Specification.new do |spec|
spec.name = 'fond_memos'
spec.version = FondMemos::VERSION
spec.authors = ['<NAME>']
spec.email = ['<EMAIL>']
spec.summary = 'Simple, effective memoization.'
spec.description = "Memoization so simple you could have written it. But I did, so you don't have to."
spec.homepage = 'https://github.com/mclark/fond_memos'
spec.license = 'MIT'
# Prevent pushing this gem to RubyGems.org by setting 'allowed_push_host', or
# delete this section to allow pushing this gem to any host.
raise 'RubyGems 2.0 or newer is required to protect against public gem pushes.' unless spec.respond_to?(:metadata)
spec.files = ['lib/fond_memos.rb', 'lib/fond_memos/version.rb']
spec.require_paths = ['lib']
spec.add_development_dependency 'bundler', '~> 1.12', '>= 1.12.5'
spec.add_development_dependency 'rake', '~> 11.1', '>= 11.1.2'
spec.add_development_dependency 'minitest', '~> 5.9'
end
| b67185a714276c7c40745136e3584e37fb93f3a6 | [
"Markdown",
"Ruby"
] | 5 | Ruby | mclark/fond_memos | 82bf095f80f055c49485282e65e4d314ae96f3d5 | b28ff76172c6ec0fdd522935f4892a02ca09b92b |
refs/heads/main | <file_sep>#!/usr/bin/pythom
print("hello")
print("hi")
| 78341fc6bc645a2312a26c5eadef614dcc1818ae | [
"Python"
] | 1 | Python | DEAR-Dan/hd | c3327b438569a7fb77cf3c1e412654f644d98196 | 4825d98da935144f83815d00fdd876a47e18fe7a |
refs/heads/master | <repo_name>tghelere/cake2-tests<file_sep>/app/View/Users/index.ctp
<h1>Users</h1>
<p><?php echo $this->Html->link('Add User', array('action' => 'add')); ?></p>
<table>
<tr>
<th>Id</th>
<th>Username</th>
<th>Password</th>
<th>Role</th>
<th>Created</th>
<th>Actions</th>
</tr>
<!-- Aqui é onde nós percorremos nossa matriz $users, imprimindo as informações dos users -->
<?php foreach ($users as $user): ?>
<tr>
<td><?php echo $user['User']['id']; ?></td>
<td><?php echo $this->Html->link($user['User']['username'], array('action' => 'view', $user['User']['id']));?></td>
<td><?php echo $user['User']['password']; ?></td>
<td><?php echo $user['User']['role']; ?></td>
<td><?php echo $user['User']['created']; ?></td>
<td>
<?php echo $this->Form->postLink(
'Delete',
array('action' => 'delete', $user['User']['id']),
array('confirm' => 'Are you sure?'));
?>
</td>
</tr>
<?php endforeach; ?>
</table> | ac6bcea01d75f16adef57510c5faf4cdac088d49 | [
"PHP"
] | 1 | PHP | tghelere/cake2-tests | 19a7c0bf4a6135403f4dd6f22056b8580729082f | 9722f3a21e62d5107f08be2c657e1e3385be2139 |
refs/heads/master | <repo_name>kennethjohnbalgos/social-share-button<file_sep>/Gemfile
source "http://rubygems.org"
# Specify your gem's dependencies in social-share.gemspec
gemspec
gem "sprite-factory", "1.4.1"
gem "chunky_png"<file_sep>/README.md
# Social Share Button
This gem is a helper that you can able to create a share feature in your Rails app.
A social sharing plugin with complete social networks
## Screenshot
<img src="http://s24.postimg.org/p4p3kpojp/ZZZ_Small_Bite_Small_Bite_Test_Event_3_Vig.png"/>
## Available Social Network List
* Facebook
* Twitter
* Google+
* Linkedin
* Pinterest
* Tumblr
* Google Bookmark
* Douban
* Weibo
* QZone
* Tencent Weibo
* Renren
* Hi Baidu
* Kaixin001
* Delicious
* Plurk
* Email
## Install
In your `Gemfile`:
```ruby
gem 'social-share-button', :git => "<EMAIL>:marvindpunongbayan/social-share-button.git"
```
And generate it:
```bash
$ bundle install
$ rails generate social_share_button:install
```
## How to update (skip this when you haven't used this gem yet)
```bundle update --source social-share-button```
## Configure
You can config `config/initializes/social_share_button.rb` to select which social sites you wanted to use:
```ruby
SocialShareButton.configure do |config|
config.allow_sites = %w(twitter facebook google_plus linkedin tumblr pinterest email)
end
```
## Usage
You need add require css,js file in your app assets files:
`app/assets/javascripts/application.coffee`
```
#= require social-share-button
```
`app/assets/stylesheets/application.scss`
```
*= require social-share-button
```
Then you can use `social_share_button_tag` helper in views, for example `app/views/posts/show.html.erb`
```erb
<%= social_share_button_tag(@post.title) %>
```
And you can choose within 2 available sizes of images 16 for 16x16, 32 for 32x32 (default):
```erb
<%= social_share_button_tag(@post.title, :image_size => "16") %>
```
Apart from the default title, you can specify the title for the special social network:
```erb
<%= social_share_button_tag(@post.title, 'data-twitter-title' => 'TheTitleForTwitter') %>
```
For Popup window use this custom popup attribute:
```erb
<%= social_share_button_tag(@post.title, :popup => "true")
```
And you can custom rel attribute:
```erb
<%= social_share_button_tag(@post.title, :rel => "twipsy") %>
```
You can also specify the URL that it links to:
```erb
<%= social_share_button_tag(@post.title, :url => "http://myapp.com/foo/bar") %>
```
```erb
<%= social_share_button_tag(@post.title, :url => "http://myapp.com/foo/bar", :image => "http://foo.bar/images/a.jpg", desc: "The summary of page", via: "MyTwitterName") %>
```
For the Tumblr there are an extra settings, prefixed with :'data-*'
```erb
<%= social_share_button_tag(@post.title, :image => "https://raw.github.com/vkulpa/social-share-button/master/lib/assets/images/sprites/social-share-button/tumblr.png", :'data-type' => 'photo') %>
<%= social_share_button_tag(@post.title, :'data-source' => "https://raw.github.com/vkulpa/social-share-button/master/lib/assets/images/sprites/social-share-button/tumblr.png", :'data-type' => 'photo') %>
```
Those two above calls are identical.
Here are the mapping of attributes depending on you data-type parameter
<pre>
| data-type | standard | custom :"data-*" prefixed |
--------------------------------------------------------------
| link (default) | title | data-title |
| | url | data-url |
| text | title | data-title |
| photo | title | data-caption |
| | image | data-source |
| quote | title | data-quote |
| | | data-source |
</pre> | 03df39b21a0ff38275973ad674960e50c0414ae2 | [
"Markdown",
"Ruby"
] | 2 | Ruby | kennethjohnbalgos/social-share-button | e9c333952c66b79e55cc286bf3caf5d9edf56acb | e21f3e3a1705db1ee586816f63daa1cafc811907 |
refs/heads/main | <repo_name>Inzamam20/Inzamam20<file_sep>/PasswordManager.h
// preprocessor definitions
/* Max website-name length: 50 characters + trailing '\n' + NUL.
   BUGFIX: parenthesised -- the old `50 +2` expanded dangerously inside
   larger expressions (e.g. `x * WEBLEN` became `x*50 + 2`). */
#define WEBLEN (50 + 2)
// global variables
int user_size = 0;
int user_capacity = 4;
// structures
typedef struct _entry{
char website[WEBLEN];
char username[USERLEN];
char password[<PASSWORD>LEN];
} entry;
// function prototypes
entry* New_entry(entry arr[], char* website, char* username, char* password);
int FindWebsite(entry arr[], char* website);
void RemoveWebsite(entry arr[], int index);
void SerializeUser(entry arr[], FILE *fp);
entry* DeserializeUser(FILE *fp);
void PrintIndexEntry(entry arr[], int index);
void ListAllPassword(entry arr[]);
int PasswordManager(void);
void PasswordGenerator(void);
void StrengthCheck(char* password, char* username);
char* scan_password(char* password);
/* BUGFIX: KeyGen and Encrypt are called by SerializeUser/DeserializeUser
   before their definitions; without prototypes they were implicitly
   declared, which is invalid in C99 and later. */
char KeyGen(char* sample);
char* Encrypt(char* sample, char key);
// main utility function
int PasswordManager(void)
{
char command;
char website[WEBLEN], username[USERLEN], password[<PASSWORD>LEN], hint[HINTLEN], question[QUESLEN], answer[ANSLEN];
entry* storage;
char file_name[MAXFILENAMELEN] = "PMfile";
strcat(file_name, ".bin");
while(1)
{
command = password_manager_menu();
system("cls");
if(command == '7')
break;
FILE* fp = fopen(file_name, "rb");
if(fp != NULL)
storage = DeserializeUser(fp);
else
{
user_capacity = 4;
user_size = 0;
}
if(command == '1')
{
while(1)
{
printf("Give the website you want to add:\n");
fgets(website, WEBLEN, stdin);
int check = FindWebsite(storage, website);
if(check != -1)
printf("Sorry, you already have an account in this website...\n");
else break;
}
printf("Now give us your username:\n");
fgets(username, USERLEN, stdin);
printf("Now give us your password:\n");
scan_password(password);
storage = New_entry(storage, website, username, password);
system("cls");
printf("<SUCCESSFULLY ADDED A NEW ENTRY>\n");
fp = fopen(file_name, "wb");
SerializeUser(storage, fp);
system("pause");
}
else if(command == '2')
{
printf("Give the website you want to find\n");
fgets(website, WEBLEN, stdin);
int check = FindWebsite(storage, website);
if(check == -1)
printf("Could not find this website in your entries\n");
else
{
PrintIndexEntry(storage, check);
printf("Do you want to copy password to clipboard? (Y/N)\n");
char check2 = getchar();
fflush(stdin);
if(check2 == 'Y')
{
copy_text_to_clipboard(storage[check].password);
printf("<SUCCESSFULLY COPIED TO CLIPBOARD>\n");
}
}
printf("\n");
system("pause");
}
else if(command == '3')
{
printf("Give the website you want to remove\n");
fgets(website, WEBLEN, stdin);
int check = FindWebsite(storage, website);
if(check == -1)
printf("Could not find this website in your entries\n");
else
{
RemoveWebsite(storage, check);
printf("<REMOVE SUCCESS>\n");
fp = fopen(file_name, "wb");
SerializeUser(storage, fp);
}
system("pause");
}
else if(command == '4')
{
if(user_size != 0)
{
printf("All entries are listed below:\n");
ListAllPassword(storage);
}
else printf("This user has no entry\n");
system("pause");
}
else if(command == '5')
{
printf("Give us your username:\n");
fgets(username, USERLEN, stdin);
printf("Give your password and we will check its strength\n");
scan_password(password);
// getting rid of the \n scanned by fgets
username[strlen(username) - 1] = 0;
password[strlen(password) - 1] = 0;
StrengthCheck(password, username);
}
else if(command == '6')
{
PasswordGenerator();
system("pause");
}
}
return 0;
}
// function definitions
/* Read a password from the keyboard without echoing it (a '*' is printed
   per character; backspace edits). A trailing '\n' plus NUL is appended,
   mirroring fgets, so stored entries compare consistently.
   BUGFIX: input now stops at PASSLEN-2 characters so the appended '\n'
   and NUL both fit inside the PASSLEN-byte buffer; the old limit of
   PASSLEN-1 wrote one byte past the end. Backspace on an empty line is
   now ignored instead of erasing the prompt character. */
char* scan_password(char* password)
{
    int len = 0;
    char ch;
    while((ch = getch()) != enter && len < PASSLEN - 2)
    {
        if(ch == backspace)
        {
            if(len != 0)
            {
                printf("\b \b");
                len--;
                password[len] = 0;
            }
        }
        else
        {
            printf("*");
            password[len] = ch;
            len++;
            password[len] = 0;
        }
    }
    printf("\n");
    password[len] = '\n';
    len++;
    password[len] = 0;
    return password;
}
/* Append one entry to the dynamic array, growing it geometrically as
   needed. Returns the (possibly relocated) array: the caller MUST use the
   return value, because realloc may move the block. Updates the global
   user_size/user_capacity bookkeeping.
   NOTE(review): the initial malloc result is not checked, unlike the
   realloc branch below -- confirm whether OOM should abort there too. */
entry* New_entry(entry arr[], char* website, char* username, char* password)
{
    if(user_size == 0)
    {
        /* First entry: allocate the initial block. */
        arr = (entry*) malloc(user_capacity * sizeof(entry));
    }
    user_size++;
    if(user_size >= user_capacity)
    {
        /* Amortised growth: double the capacity when full. */
        user_capacity *= 2;
        arr = (entry*) realloc(arr, user_capacity * sizeof(entry));
        if(arr == NULL)
        {
            fprintf(stderr, "Could not allocate enough memory...Exiting\n");
            exit(EXIT_FAILURE);
        }
    }
    /* Copy the three string fields (each already ends with '\n'). */
    strcpy(arr[user_size-1].password, password);
    strcpy(arr[user_size-1].username, username);
    strcpy(arr[user_size-1].website, website);
    return arr;
}
/* Linear search for `website` (exact match, including the trailing
   newline) in the entry array; returns its index, or -1 if absent. */
int FindWebsite(entry arr[], char* website)
{
    for(int idx = 0 ; idx < user_size ; idx++)
    {
        if(strcmp(arr[idx].website, website) == 0)
            return idx;
    }
    return -1;
}
/* Remove the entry at `index` by shifting the tail down one slot and
   decrementing the logical size.
   BUGFIX: the old version freed or shrink-realloc'd the storage here, but
   `arr` is a by-value copy of the caller's pointer -- after a free or a
   relocating realloc the caller (PasswordManager's `storage`) was left
   dangling and subsequently passed to SerializeUser. We now leave the
   allocation untouched; spare capacity is reused by later inserts and
   reclaimed when the caller reloads via DeserializeUser. */
void RemoveWebsite(entry arr[], int index)
{
    for(int i = index ; i < user_size-1 ; i++)
        arr[i] = arr[i+1];
    user_size--;
}
/* Persist every entry to `fp` and close it. Record format per entry:
   [1-byte XOR key derived from the website name][the entry struct with
   its three string fields XOR-scrambled by that key].
   NOTE: Encrypt scrambles the strings *in place*, so after this call the
   in-memory array holds ciphertext; PasswordManager() reloads from disk
   on the next menu round before using it again. */
void SerializeUser(entry arr[], FILE *fp)
{
    if(fp == NULL)
    {
        fprintf(stderr, "Could not save/create file...Exiting\n");
        exit(EXIT_FAILURE);
    }
    for(int i = 0 ; i < user_size ; i++)
    {
        char key = KeyGen(arr[i].website);
        fwrite(&key, sizeof(key), 1, fp);
        Encrypt(arr[i].website, key);
        Encrypt(arr[i].username, key);
        Encrypt(arr[i].password, key);
        /* Whole-struct write; DeserializeUser reads the fields back one by
           one, which matches as long as the struct is three char arrays
           with no padding between them. */
        fwrite(&arr[i], sizeof(arr[0]), 1, fp);
    }
    fclose(fp);
}
/* Rebuild the entry array from `fp` (the format written by SerializeUser):
   repeated [1-byte key][entry struct] records, XOR-unscrambled with the
   per-record key. Resets and repopulates the user_size global, closes
   `fp`, and returns the freshly built array.
   NOTE(review): if the file holds no records, `arr` is returned
   uninitialised -- harmless today because user_size stays 0 and callers
   never dereference an empty array, but worth initialising to NULL. */
entry* DeserializeUser(FILE *fp)
{
    entry* arr;
    user_size = 0;
    while(1)
    {
        char website[WEBLEN];
        char username[USERLEN];
        char password[PASSLEN];
        char key;
        /* The key read doubles as the end-of-file probe. */
        int check = fread(&key, sizeof(key), 1, fp);
        if(check == 0) break;
        /* Field-by-field read of the struct written by SerializeUser. */
        fread(website, WEBLEN, 1, fp);
        fread(username, USERLEN, 1, fp);
        fread(password, PASSLEN, 1, fp);
        /* XOR is symmetric: 'Encrypt' with the same key decrypts. */
        Encrypt(website, key);
        Encrypt(username, key);
        Encrypt(password, key);
        arr = New_entry(arr, website, username, password);
    }
    fclose(fp);
    return arr;
}
/* Display one stored credential; the fields already end in '\n'. */
void PrintIndexEntry(entry arr[], int index)
{
    printf("\nWebsite: %s", arr[index].website);
    printf("Username: %s", arr[index].username);
    printf("Password: %s", arr[index].password);
}
/* Print every stored website name (each already ends in '\n'), followed
   by one blank line. */
void ListAllPassword(entry arr[])
{
    int idx = 0;
    while(idx < user_size)
    {
        printf("%s", arr[idx].website);
        idx++;
    }
    printf("\n");
}
/* Analyse `password` (NUL-terminated; the caller strips the trailing
   newline first) and print a verdict: report each missing character
   class, shortness, and overlap with `username`, then summarise
   weak/medium/strong. Uses the externally-defined is_substring() helper. */
void StrengthCheck(char* password, char* username)
{
    bool capital, small, special, numbers, notlong, substring;
    capital = small = special = numbers = notlong = substring = false;
    int len = strlen(password), weakness = 0;
    if(len <= 5) notlong = true;
    /* BUGFIX: scan every character (i < len). The loop previously stopped
       at len-1 and ignored the final character, so e.g. a lone capital or
       digit at the end was never credited. */
    for(int i = 0 ; i < len ; i++)
    {
        if(password[i] >= '0' && password[i] <= '9')
            numbers = true;
        else if(password[i] >= 'a' && password[i] <= 'z')
            small = true;
        else if(password[i] >= 'A' && password[i] <= 'Z')
            capital = true;
        else special = true;
    }
    /* Test whether the shorter string occurs inside the longer one. */
    if(len > strlen(username))
        substring = is_substring(username, password);
    else substring = is_substring(password, username);
    if(substring)
    {
        printf("Your password and username have too much in common\n");
        weakness += 5;
    }
    if(!numbers)
    {
        printf("Your password does not have numbers\n");
        weakness++;
    }
    if(!capital)
    {
        printf("Your password does not have capital letters\n");
        weakness++;
    }
    if(!small)
    {
        printf("Your password does not have small letters\n");
        weakness++;
    }
    if(!special)
    {
        printf("Your password does not have special symbols\n");
        weakness++;
    }
    if(notlong)
    {
        printf("Your password is very short\n");
        weakness++;
    }
    if(numbers && capital && small && special && !notlong && !substring)
        printf("Your password is strong because it has all parameters!!!\n");
    if(weakness >= 3)
        printf("Your password is weak\n");
    else if(weakness >= 1)
        printf("Your password is medium in strength\n");
    system("pause");
    system("cls");
}
/* Interactively generate a random password: ask for a length, ask per
   character class whether it is required (Y), banned (N) or optional (M),
   seed one guaranteed character for every required class, fill the
   remaining slots from the allowed classes, then optionally copy the
   result to the clipboard (copy_text_to_clipboard is defined elsewhere). */
void PasswordGenerator(void)
{
    int length, position;
    char input;
    /* Allowed class tags: '1' small, '2' capital, '3' digit, '4' special.
       BUGFIX: `{0}` -- the previous empty-brace initialiser `{}` is not
       valid in standard C before C23. */
    char allowed[5] = {0};
    int allowed_len = 0, must_haves = 0;
    srand(time(0));
    /* 33 symbols; index 32 is the space, which some draws below exclude. */
    char special_symbols[] = {',' , ';', ':', '<', '>', '/'
                            , '|', '[', ']', '{', '}', '.', '?'
                            , ')', '(', '+', '-', '_', '=', '*', '&'
                            , '^', '%', '$', '#', '@', '!', '`', '~'
                            , '\\', '\"', '\'', ' '};
    printf("Enter the length of your password :\n");
    scanf("%d", &length);
    fflush(stdin);
    char *temp_pass = (char*) calloc(length+1, sizeof(char));
    printf("Y = must have atleast one present in password\n"
           "N = must NOT be in password\n"
           "M = may or may not be in password\n"
           "Remember that inputs are case-sensitive\n\n");
    printf("Do you want your password to have small letters? (Y/N/M) : ");
    scanf("%c", &input);
    fflush(stdin);
    if(input == 'Y')
    {
        /* Reserve one random empty slot for a guaranteed small letter. */
        must_haves++;
        if(must_haves > length)
        {
            printf("Sorry, password is not possible\n");
            free(temp_pass);
            return;
        }
        position = rand()%length;
        while(temp_pass[position] != 0) position = rand()%length;
        temp_pass[position] = (rand()%26) + 'a';
    }
    if(input != 'N')
    {
        allowed[allowed_len] = '1';
        allowed_len++;
    }
    printf("Do you want your password to have capital letters? (Y/N/M) : ");
    scanf("%c", &input);
    fflush(stdin);
    if(input == 'Y')
    {
        /* Guaranteed capital letter. */
        must_haves++;
        if(must_haves > length)
        {
            printf("Sorry, password is not possible\n");
            free(temp_pass);
            return;
        }
        position = rand()%length;
        while(temp_pass[position] != 0) position = rand()%length;
        temp_pass[position] = (rand()%26) + 'A';
    }
    if(input != 'N')
    {
        allowed[allowed_len] = '2';
        allowed_len++;
    }
    printf("Do you want your password to have numbers? (Y/N/M) : ");
    scanf("%c", &input);
    fflush(stdin);
    if(input == 'Y')
    {
        /* Guaranteed digit. */
        must_haves++;
        if(must_haves > length)
        {
            printf("Sorry, password is not possible\n");
            free(temp_pass);
            return;
        }
        position = rand()%length;
        while(temp_pass[position] != 0) position = rand()%length;
        temp_pass[position] = (rand()%10) + '0';
    }
    if(input != 'N')
    {
        allowed[allowed_len] = '3';
        allowed_len++;
    }
    printf("Do you want your password to have special symbols? (Y/N/M) : ");
    scanf("%c", &input);
    fflush(stdin);
    if(input == 'Y')
    {
        /* Guaranteed special symbol, drawn with %32 so it is never the
           space (it may land in any slot, including the last). */
        must_haves++;
        if(must_haves > length)
        {
            printf("Sorry, password is not possible\n");
            free(temp_pass);
            return;
        }
        position = rand()%length;
        while(temp_pass[position] != 0) position = rand()%length;
        temp_pass[position] = special_symbols[(rand()%32)];
    }
    if(input != 'N')
    {
        allowed[allowed_len] = '4';
        allowed_len++;
    }
    if(allowed_len == 0)
    {
        printf("Sorry, password is not possible\n");
        free(temp_pass);
        return;
    }
    /* Fill every remaining (zero) slot from a randomly chosen allowed class. */
    for(int i = 0 ; i < length ; i++)
    {
        if(temp_pass[i] == 0)
        {
            position = rand()%allowed_len;
            if(allowed[position] == '1')
                temp_pass[i] = (rand()%26) + 'a';
            if(allowed[position] == '2')
                temp_pass[i] = (rand()%26) + 'A';
            if(allowed[position] == '3')
                temp_pass[i] = (rand()%10) + '0';
            if(allowed[position] == '4')
            {
                /* BUGFIX: the second, unguarded `if` used to overwrite the
                   guarded one unconditionally, so the space symbol was
                   unreachable and the "no trailing space" guard was dead
                   code. Now: any symbol (%33, may be a space) in the
                   middle of the password, never a trailing space (%32). */
                if(i != length - 1)
                    temp_pass[i] = special_symbols[rand()%33];
                else
                    temp_pass[i] = special_symbols[rand()%32];
            }
        }
    }
    printf("Generated password : \n%s\n", temp_pass);
    printf("Do you want to copy password to clipboard? (Y/N)\n");
    char check = getchar();
    fflush(stdin);
    if(check == 'Y')
    {
        copy_text_to_clipboard(temp_pass);
        printf("<SUCCESSFULLY COPIED TO CLIPBOARD>\n");
    }
    free(temp_pass);
}
char KeyGen(char* sample)
{
    /* Derive a one-byte XOR key from a string: sum each character plus a
       position-weighted term, then fold into the printable-ish range
       33..287 (truncated to char on return). */
    unsigned int acc = 0;
    int n = strlen(sample);
    for (int idx = 0; idx < n; idx++)
        acc += sample[idx] + n * idx;
    return acc % 255 + 33;
}
char* Encrypt(char* sample, char key)
{
    /* Symmetric in-place XOR cipher: applying it twice with the same key
       restores the original.  The final two characters are deliberately
       left untouched (matches how records are stored on disk). */
    int n = strlen(sample);
    int i = 0;
    while (i < n - 2)
    {
        sample[i] ^= key;
        i++;
    }
    return sample;
}
// preprocessor definitions
#define EVENTLEN 150    // max length of one event-description string
// global varibles
/*
https://www.jagranjosh.com/general-knowledge/important-days-events-and-festivals-of-february-1549021274-1
https://www.un.org/en/observances/international-days-and-weeks
https://www.rosecalendars.co.uk/resource/notable-dates
https://www.calendarr.com/united-states/calendar-2022/
*/
// fix it for leap years!!!!
// struct arr[year][daysgone]
// Table of notable dates, indexed by the day-of-year value produced by
// days_gone() (1..365 for a non-leap calendar).  Index 366 is a sentinel
// reserved for 29th February.  Unlisted days default to empty strings.
char special_days[367][EVENTLEN] = {
[366] = "(29/2) 29th February, only comes every 4 years :D",
[1] = "(1/1) First day of the year!!!! Also \"Global family day\"",
[4] = "(4/1) World Braille Day!",
[6] = "(6/1) World Day of War Orphans",
[35] = "(4/2) World Cancer Day",
[43] = "(12/2) Darwin Day (Darwin's birthday) also <NAME>'s birthday",
[44] = "(13/2) World Radio Day",
[45] = "(14/2) Valentines Day",
[54] = "(23/2) International Mother language day",
[67] = "(8/3) Womens day",
[73] = "(14/3) Pi day!!!",
[76] = "(17/3) Birthday of Bangabandhu ALSO World children day",
[83] = "(24/3) Tuberculosis day",
[84] = "(25/3) The black night for bangalees",
[85] = "(26/3) Independence day",
[91] = "(1/4) Aprils fool day",
[104] = "(14/4) Bangla Noboborsho",
[121] = "(1/5) May day",
[127] = "(7/5) Rabindranath's Birthday",
[128] = "(8/5) Mothers day",
[144] = "(24/5) Nazrul's Birthday",
// 3rd sunday of june is fathers day
[172] = "(21/6) Longest day of the year",
[185] = "(4/7) 4th of july...special I guess?",
[219] = "(7/8) Rabindranath's death day",
[227] = "(15/8) Bangabandhu's death day",
[241] = "(29/8) Nazrul's death day",
[264] = "(21/9) World peace day",
[272] = "(29/9) World heart day",
[297] = "(24/10) United Nations day",
[304] = "(31/10) Halloweeeeeeeen ~~ mhuahahaha",
[316] = "(12/11) Pneumonia day",
[325] = "(21/11) Armed forces day",
[348] = "(14/12) Martyred Intellectuals Day",
[350] = "(16/12) Victory day",
[356] = "(22/12) Shortest day",
[358] = "(24/12) Christmas eve",
[359] = "(25/12) Christmas day",
[365] = "(31/12) Last day of the year",
};
// function prototypes
int days_gone(int m, int d); //will return the 'th' day of the year but returns 366 for feb29
int christmas_music();
// function definitions
int days_gone(int m, int d)
{
    /* Day-of-year for d/m in a non-leap calendar, except that 29th February
       maps to the sentinel value 366 (see the special_days table). */
    if (m == 2 && d == 29)
        return 366;
    /* Cumulative days before each month (Jan..Dec), February = 28. */
    static const int days_before[12] = {
        0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334
    };
    return days_before[m - 1] + d;
}
int christmas_music()
{
    // Play "Jingle Bells" through the PC speaker (Win32 Beep/Sleep), printing
    // each lyric word as its note sounds.  Returns 0.
    // Fix: the function is declared int but previously returned no value
    // (undefined behavior if the caller reads the result).
    enum _music{
        half = 350,   // note duration in milliseconds
        C4 = 261, D4 = 293, E4 = 329, F4 = 349, G4 = 392, A4 = 440,
        B4 = 493, C5 = 523, D5 = 587, E5 = 659, F5 = 698, G5 = 783,
        A5 = 880, B5 = 987
    };
    Beep(E5, half);
    printf("Jingle ");
    Beep(E5, half);
    printf("bell ");
    Beep(E5, half);
    Sleep(half);
    Beep(E5, half);
    printf("Jingle ");
    Beep(E5, half);
    printf("bell ");
    Beep(E5, half);
    Sleep(half);
    Beep(E5, half);
    printf("jingle ");
    Beep(G5, half);
    printf("all ");
    Beep(C5, half);
    printf("the ");
    Beep(D5, half);
    printf("way\n");
    Beep(E5, half);
    Beep(C4, half);
    Beep(D4, half);
    Beep(E4, half);
    printf("Oh ");
    Beep(F5, half);
    printf("what ");
    Beep(F5, half);
    printf("fun ");
    Beep(F5, half);
    Sleep(half);
    printf("it ");
    Beep(F5, half);
    printf("is ");
    Beep(E5, half);
    printf("to ride ");
    Beep(E5, half);
    Sleep(half);
    printf("a ");
    Beep(E5, half);
    printf("one ");
    Beep(D5, half);
    printf("horse ");
    Beep(D5, half);
    printf("open ");
    Beep(E5, half);
    printf("sleigh");
    Beep(D5, half);
    Sleep(half);
    Beep(G5, half);
    return 0;
}
<file_sep>/Main_log.h
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <stdbool.h>
#include <time.h>
#include <ctype.h>
#include <conio.h>
#include <windows.h>
#include <dir.h>
#include <unistd.h> //unneeded
#include <sys/stat.h> //unneeded
#include <errno.h> //unneeded
#include <process.h> // unneeded
// preprocessor defines
#define MAINFILENAME "Manager.bin"
#define README "Read me.txt"
// Buffer sizes: usable length + 2 (room for fgets' '\n' and the NUL).
#define USERLEN 50 +2
#define PASSLEN 50 +2
#define QUESLEN 100 +2
#define ANSLEN 100 +2
#define HINTLEN 100 +2
#define MAXFILENAMELEN 15 +2
// global variables
int current_size = 0;       // number of account records currently stored
int current_capacity = 4;   // allocated capacity of the account array
int month, date, year, day; // filled in by generate_time()
char* read_me_text = "-----------IDEA----------\n"
"This is a very generic and simple management application.\n\n"
"----------WARNING---------\n"
"This software uses a lot of files.\n"
"So, be sure to put it in a separate folder.\n\n"
"----------How to use---------\n"
"1.Follow the on screen instructions\n"
"2.To give input use enter/return key of the keyboard\n"
"3.Everything is case sensitive here\n"
"4.Remember to read what the prompt asks you to do\n"
"5.In diary mode you can use tilde(`) to toggle between modes\n"
"6.Press Esc to finish entry in diary\n"
"7.In diary press LShift+Tilde(~) for highlight mode\n"
"8.You can have maximum 50 list entries with highest 100 length strings\n\n";
// structures
typedef struct _data{
char user[USERLEN];
char password[<PASSWORD>];
char question[QUESLEN];
char answer[ANSLEN];
char hint[HINTLEN];
} data;
// function prototypes
// -- account-array management --
data* InsertUser(data arr[], char* user, char* password, char* question, char* answer, char* hint);
int FindUser(data arr[], char* user);
void RemoveUser(data arr[], int index);
// -- persistence (encrypted binary file) --
void SerializeMainFile(data arr[], FILE* fp);
data* DeserializeMainFile(FILE* fp);
void ListAllUsers(data arr[]);
// -- crypto helpers --
char KeyGen(char* sample);
void CreateReadMeFile();
char* Encrypt(char* sample, char key);
// -- string / OS utilities --
bool is_substring(const char small[], const char large[]);
char* num_to_string(char* destination, int number);
void copy_text_to_clipboard(char* txt_to_cpy);
void normalize_file(char* file_name);
void hide_file(char* file_name);
void read_only_file(char* file_name);
void generate_time(void);
void swap_string(char* a, char* b);
// taken from todolist
int isLeapYear(int );
// function definitions
char* num_to_string(char* destination, int number)
{
    // Write the decimal digits of a non-negative number into `destination`
    // (NUL-terminated) and return it.  Zero produces "0".
    int len = 0;
    // Emit digits least-significant first...
    while (1)
    {
        destination[len++] = number % 10 + '0';
        number /= 10;
        if (number == 0)
            break;
    }
    // ...then reverse them in place.
    for (int lo = 0, hi = len - 1; lo < hi; lo++, hi--)
    {
        char t = destination[lo];
        destination[lo] = destination[hi];
        destination[hi] = t;
    }
    destination[len] = 0;
    return destination;
}
// Case-insensitive substring test: true iff `small` occurs inside `large`.
// Returns false for an empty needle (matches the original behavior).
// Fix: the scan is now bounded so the comparison never reads past the end
// of `large` (previously large[i+j] could index beyond the terminator when
// a partial match started near the end of the string).
bool is_substring(const char small[], const char large[])
{
    int slen = strlen(small);
    int llen = strlen(large);
    if (slen == 0 || slen > llen)
        return false;
    for (int i = 0; i + slen <= llen; i++)
    {
        int j;
        for (j = 0; j < slen; j++)
        {
            char s = small[j];
            char l = large[i + j];
            if (isalpha((unsigned char)s)) s = tolower((unsigned char)s);
            if (isalpha((unsigned char)l)) l = tolower((unsigned char)l);
            if (l != s)
                break;
        }
        if (j == slen)
            return true;
    }
    return false;
}
// Append a new account record, allocating/growing the array as needed, and
// return the (possibly moved) array pointer.  Callers must keep the return
// value since realloc may relocate the block.  Uses the current_size /
// current_capacity globals.
// Fix: the password strcpy argument had been destroyed by text redaction
// ("<PASSWORD>"); the `password` parameter is what gets stored.
data* InsertUser(data arr[], char* user, char* password, char* question, char* answer, char* hint)
{
    if(current_size == 0)
    {
        // First record: start a fresh array at the initial capacity.
        arr = (data*) malloc(current_capacity * sizeof(data));
    }
    current_size++;
    if(current_size >= current_capacity)
    {
        current_capacity *= 2;
        arr = (data*) realloc(arr, current_capacity * sizeof(data));
        if(arr == NULL)
        {
            fprintf(stderr, "Could not allocate enough memory...Exiting\n");
            exit(EXIT_FAILURE);
        }
    }
    strcpy(arr[current_size-1].password, password);
    strcpy(arr[current_size-1].user, user);
    strcpy(arr[current_size-1].question, question);
    strcpy(arr[current_size-1].answer, answer);
    strcpy(arr[current_size-1].hint, hint);
    return arr;
}
int FindUser(data arr[], char* user)
{
    // Linear scan over the stored records; return the index of the record
    // whose username matches exactly, or -1 if none does.
    for(int idx = 0 ; idx < current_size ; idx++)
    {
        if(strcmp(arr[idx].user, user) == 0)
            return idx;
    }
    return -1;
}
// Remove account `index` using swap-with-last: the last user's data file
// ("User<last>") is renamed onto the removed slot's file name ("User<index>")
// and the last record overwrites the removed record in the array.
void RemoveUser(data arr[], int index)
{
    char file_name[MAXFILENAMELEN] = "User", file_name_extension[MAXFILENAMELEN] = {};
    char temp_file_name[MAXFILENAMELEN] = "User", temp_file_name_extension[MAXFILENAMELEN] = {};
    strcat(file_name, num_to_string(file_name_extension, index));
    strcat(temp_file_name, num_to_string(temp_file_name_extension, current_size-1));
    arr[index] = arr[current_size-1];
    rename(temp_file_name, file_name);
    current_size--;
    if(current_size == 0)
        free(arr);
    // NOTE(review): the realloc below updates only this function's local copy
    // of `arr`; the caller's pointer is not updated (void return) and may be
    // left dangling after a shrink — confirm callers reload the array.
    else if(current_size < current_capacity/4)
    {
        current_capacity /= 4;
        arr = (data*) realloc(arr, current_capacity * sizeof(data));
        if(arr == NULL)
        {
            fprintf(stderr, "Something went wrong...Exiting\n");
            exit(EXIT_FAILURE);
        }
    }
}
void SerializeMainFile(data arr[], FILE* fp)
{
    // Write every record to the already-opened binary file as a
    // (key, encrypted record) pair.  NOTE: Encrypt works in place, so the
    // in-memory array is left encrypted after this call.  Closes fp.
    if(fp == NULL)
    {
        fprintf(stderr, "Could not save/create file...Exiting\n");
        exit(EXIT_FAILURE);
    }
    int idx = 0;
    while(idx < current_size)
    {
        data* rec = &arr[idx];
        char key = KeyGen(rec->user);   // per-record key derived from the username
        fwrite(&key, sizeof(key), 1, fp);
        Encrypt(rec->user, key);
        Encrypt(rec->password, key);
        Encrypt(rec->question, key);
        Encrypt(rec->answer, key);
        Encrypt(rec->hint, key);
        fwrite(rec, sizeof(*rec), 1, fp);
        idx++;
    }
    fclose(fp);
}
data* DeserializeMainFile(FILE* fp)
{
data* arr;
current_size = 0;
current_capacity = 4;
while(1)
{
char user[USERLEN], password[<PASSWORD>], question[QUESLEN], answer[ANSLEN], hint[HINTLEN];
char key;
int check = fread(&key, sizeof(key), 1, fp);
if(check == 0) break;
fread(user, sizeof(user), 1, fp);
fread(password, sizeof(password), 1, fp);
fread(question, sizeof(question), 1, fp);
fread(answer, sizeof(answer), 1, fp);
fread(hint, sizeof(hint), 1, fp);
Encrypt(user, key);
Encrypt(password, key);
Encrypt(question, key);
Encrypt(answer, key);
Encrypt(hint, key);
arr = InsertUser(arr, user, password, question, answer, hint);
}
fclose(fp);
return arr;
}
void ListAllUsers(data arr[])
{
    // Print every stored username (each string already carries its own
    // trailing newline from fgets).
    int idx = 0;
    while(idx < current_size)
    {
        printf("Username : %s", arr[idx].user);
        idx++;
    }
}
// Create the "Read me.txt" help file if it does not exist yet.
// Fixes: the original called fclose(NULL) when the file was missing
// (undefined behavior), and passed read_me_text as a printf format string;
// fputs writes it verbatim and cannot misinterpret '%' sequences.
void CreateReadMeFile()
{
    FILE* fp = fopen(README, "r");
    if(fp != NULL)
    {
        // Already present — nothing to do.
        fclose(fp);
        return;
    }
    fp = fopen(README, "w");
    if(fp == NULL)
        return;   // cannot create it; best-effort feature, keep going
    fputs(read_me_text, fp);
    fclose(fp);
}
// Place `txt_to_cpy` on the Windows clipboard as CF_TEXT.
// The Win32 sequence (Open -> Empty -> SetClipboardData -> Close) is
// order-sensitive; the HGLOBAL is handed over to the clipboard, which is
// why it is not freed here.
void copy_text_to_clipboard(char* txt_to_cpy)
{
    OpenClipboard(0);
    EmptyClipboard();
    size_t l = strlen(txt_to_cpy) + 1;   // include the NUL terminator
    HGLOBAL h = GlobalAlloc(GMEM_MOVEABLE, l);
    memcpy(GlobalLock(h), txt_to_cpy, l);
    GlobalUnlock(h);
    SetClipboardData(CF_TEXT, h);
    CloseClipboard();
}
// Reset a file's attributes to NORMAL (clears hidden/read-only flags).
// Fix: removed the unused `attr` local — GetFileAttributes' result was
// fetched but never used here.
void normalize_file(char* file_name)
{
    SetFileAttributes(file_name, FILE_ATTRIBUTE_NORMAL);
}
void hide_file(char* file_name)
{
int attr = GetFileAttributes(file_name);
SetFileAttributes(file_name, (attr | FILE_ATTRIBUTE_HIDDEN));
}
void read_only_file(char* file_name)
{
int attr = GetFileAttributes(file_name);
SetFileAttributes(file_name, (attr | FILE_ATTRIBUTE_READONLY));
}
// Parse ctime()'s fixed-format string ("Www Mmm dd hh:mm:ss yyyy\n") and
// fill the global month / date / year / day variables.  Unmatched fields
// stay -1.  Weekday numbering follows the `week` table: 1 = Sat .. 7 = Fri.
// Fix: "&current_time" had been corrupted to "¤t_time" by HTML-entity
// mangling ("&curren" -> '¤') in both the time() and ctime() calls.
void generate_time(void)
{
    month = day = year = date = -1; //initializing
    time_t current_time;
    time(&current_time);
    char mm[4] = {}, dd[3] = {}, yyyy[5] = {}, ww[4] = {};
    char* temp = ctime(&current_time);
    char month_names[12][4] = {"Jan", "Feb", "Mar", "Apr", "May", "Jun",
                               "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"};
    char week[7][4] = {"Sat", "Sun", "Mon", "Tue", "Wed", "Thu", "Fri"};
    // Slice the fixed-position fields out of the ctime string.
    for(int i = 0 ; i < 3 ; i++)
        ww[i] = temp[i];
    for(int i = 4 ; i < 4+3 ; i++)
        mm[i-4] = temp[i];
    for(int i = 8 ; i < 8+2 ; i++)
        dd[i-8] = temp[i];
    for(int i = 20 ; i < 20+4 ; i++)
        yyyy[i-20] = temp[i];
    for(int i = 0 ; i < 12 ; i++)
    {
        if(!strcmp(mm, month_names[i]))
            month = i+1;
    }
    for(int i = 0 ; i < 7 ; i++)
    {
        if(!strcmp(ww, week[i]))
            day = i+1;
    }
    date = (dd[1]-'0') + (dd[0]-'0')*10;
    year = yyyy[3]-'0' + (yyyy[2]-'0')*10 + (yyyy[1]-'0')*100 + (yyyy[0]-'0')*1000;
}
void swap_string(char* a, char* b)
{
    // Exchange the contents of two NUL-terminated strings.  Both buffers
    // must be large enough to hold the other's contents.
    int len_a = strlen(a);
    int len_b = strlen(b);
    int scratch_len = (len_a > len_b) ? len_a : len_b;
    char* scratch = (char*) calloc(scratch_len + 1, sizeof(char));
    strcpy(scratch, a);
    strcpy(a, b);
    strcpy(b, scratch);
    free(scratch);
}
#include <stdio.h>
#include <stdbool.h>
#include <string.h>
#include <stdlib.h>
#include <conio.h>
#include <windows.h>
#define NAMELENGTH 200
#define DESCLENGTH 800
#define TODOLIST "ToDo-List.txt"
// Global Variables
int TotalTask = 0;   // total number of tasks (kept equal to taskno)
int taskno = 0;      // number of tasks currently stored in `node`
int capacity = 4;    // allocated capacity of `node`
// int mm, dd, yy, date, num, leapyear;
int num, leapyear;   // num: weekday offset used by the calendar renderer
// Month banners printed above the calendar grid.
char *months[] =
{
    "\n\n ----------January ----------",
    "\n\n ----------February ----------",
    "\n\n ----------March ----------",
    "\n\n ----------April ----------",
    "\n\n ----------May ----------",
    "\n\n ----------June ----------",
    "\n\n ----------July ----------",
    "\n\n ----------August ----------",
    "\n\n ----------September ----------",
    "\n\n ----------October ----------",
    "\n\n ----------November ----------",
    "\n\n ----------December ----------"
};
// find shortcut (idea)
// One to-do task record.
typedef struct new_entry
{
    char taskname[NAMELENGTH];
    char Description[DESCLENGTH];
    int d;
    int m;
    int y;
    int countdays; // using days gone
    bool mark_as_done;
} newentry;
newentry *node;   // dynamic array of tasks
//Function Prototypes
int dayMove(void);
void calendar(void);
int calendarView(void);
newentry* ADD_LIST (void);
newentry* REMOVE_LIST(void);
void SERIALIZE_LIST (void);
void SHOW_ALL_LIST (void);
void SHOW_LIST(int);
void DESERIALIZE_LIST (void);
void ADD_INFO ( char taskname[NAMELENGTH] , char description[DESCLENGTH] , int d , int m , int y , bool mark_as_done);
newentry* EDIT_ENTRY(void);
newentry* SWITCH_TO_EDIT (int);
void Mark_Done (void);
void SORT_LIST (void);
// void Progress_Bar(void);
// void Show_Bar (int);
int daysGone (int, int, int);
int yeargone (int, int, int);
void SEARCH_ENTRY();
int todo(void);
// definitions
int dayMove()
{
    // Compute the weekday offset for the global `year` from the Gregorian
    // leap-day corrections accumulated up to the previous year; stores the
    // result in the global `num` and also returns it.
    int prev = year - 1;
    int quads = prev / 4;
    int centuries = prev / 100;
    int quad_centuries = prev / 400;
    num = (year + quads - centuries + quad_centuries) % 7;
    return num;
}
// Render the calendar grid for the current month and the next one
// (`mLoop + 1 == month || mLoop == month`), highlighting dates that carry
// tasks: undone deadlines in the `selected` color, done ones in `theme`.
void calendar(void)
{
    int i = 1, mLoop, flag, Lday;
    for (mLoop = 0; mLoop < 12; mLoop++)
    {
        if (mLoop + 1 == month || mLoop == month)
            printf("%s", months[mLoop]); // printing month name
        if (mLoop + 1 == month || mLoop == month)
            printf("\n\nSun  Mon  Tue  Wed  Thu  Fri  Sat\n");
        for (Lday = 1; Lday <= 1 + num * 5; Lday++) // fixing orientation
        {
            if (mLoop + 1 == month || mLoop == month)
                printf(" ");
        }
        for (Lday = 1; Lday <= dayNum[mLoop]; Lday++) // printing dates
        {
            flag = 0;
            // Does any task's deadline fall on this day of the year?
            for (int j = 0; j < taskno; j++)
            {
                if (year == node[j].y && i == daysGone(node[j].d, node[j].m, node[j].y))
                {
                    if(node[j].mark_as_done == true) // fix this
                    {
                        color(theme);
                        if(j != taskno-1) continue;
                    }
                    else color(selected);
                    if (mLoop + 1 == month || mLoop == month)
                        printf("%2d", Lday);
                    i++;
                    flag = 1;
                    color(not_selected);
                    break;
                }
            }
            if (!flag)
            {
                // Plain (task-free) date.
                if (mLoop + 1 == month || mLoop == month)
                    printf("%2d", Lday);
                i++;
            }
            if (mLoop + 1 == month || mLoop == month)
            {
                // Column spacing; wrap to a new row after Saturday.
                if ((Lday + num) % 7 > 0)
                    printf("   ");
                else
                    printf("\n ");
            }
        }
        num = (num + dayNum[mLoop]) % 7;   // carry weekday offset into next month
    }
}
// Print today's date and the two-month calendar view, then wait for a key.
int calendarView(void)
{
    color(not_selected);
    num = dayMove();     // weekday offset of 1st January for the current year
    printf("Today : %2.2d/%2.2d/%4.4d", date, month, year);
    isLeapYear(year);    // side effect: refreshes dayNum[1] for February
    calendar();
    printf("\n");
    system("pause");
    return 0;
}
// Persist every task record to the binary list file.
// Fix: guard against fopen failure — the original dereferenced a NULL
// FILE* via fwrite/fclose when the file could not be created.
void SERIALIZE_LIST (void)
{
    FILE *fp = fopen ( TODOLIST , "wb" );
    if (fp == NULL)
        return;   // best-effort save; nothing we can do without the file
    for ( int I = 0 ; I < taskno ; I++ )
        fwrite ( &node[I] , sizeof(node[0]) , 1 , fp );
    fclose(fp);
}
void DESERIALIZE_LIST (void)
{
    // Reload every task record from disk into the dynamic array, resetting
    // the counters first.  A missing file simply leaves the list empty.
    taskno = 0;
    capacity = 4;
    FILE *fp = fopen(TODOLIST, "rb");
    if (fp == NULL)
        return;
    newentry buffer;
    while (fread(&buffer, sizeof(buffer), 1, fp) == 1)
        ADD_INFO(buffer.taskname, buffer.Description,
                 buffer.d, buffer.m, buffer.y, buffer.mark_as_done);
    fclose(fp);
}
void ADD_INFO ( char taskname[NAMELENGTH] , char description[DESCLENGTH] , int d , int m , int y , bool mark_as_done)
{
    // Append one task record to `node`, doubling the capacity when full.
    newentry *slot = &node[taskno];
    strcpy(slot->taskname, taskname);
    strcpy(slot->Description, description);
    slot->d = d;
    slot->m = m;
    slot->y = y;
    slot->countdays = yeargone(d, m, y);   // sort key: approximate day count
    slot->mark_as_done = mark_as_done;
    taskno++;
    TotalTask = taskno;
    if (taskno == capacity)
    {
        capacity *= 2;
        node = (newentry*) realloc(node, capacity * sizeof(newentry));
    }
}
// add korle length barbe capacity barbe length kombe remove entry er shomoy
// Prompt for a new task (name, description, deadline), validate the date,
// append it to the dynamic array, then sort and persist the list.
// Returns the (possibly reallocated) array, or NULL for an invalid date.
// Fixes: the old validation rejected 29/2 even on leap years (the trailing
// `d > 28` clause fired before the leap case), and it indexed dayNum[m-1]
// before range-checking m (out-of-bounds read for m > 12).
newentry* ADD_LIST (void)
{
    char taskname[NAMELENGTH] , description[DESCLENGTH];
    int d , m , y;
    system("cls");
    printf("Task Name : ");
    fgets(taskname,NAMELENGTH,stdin);
    printf("Description : ");
    fgets(description,DESCLENGTH,stdin);
    printf("Deadline (dd/mm/yyyy): ");
    scanf("%d / %d / %d", &d , &m , &y);
    getchar();
    // February caps at 29 in leap years, 28 otherwise; other months use dayNum.
    if(d < 1 || m < 1 || m > 12 || (m == 2 ? d > (isLeapYear(y) ? 29 : 28) : d > dayNum[m-1]))
    {
        printf("Invalid time!\n");
        system("pause");
        return NULL;
    }
    if ( taskno == 0 )
    {
        node = (newentry*) realloc (node , capacity * sizeof(newentry));
    }
    taskno++;
    TotalTask = taskno;
    if(taskno >= capacity)
    {
        capacity *= 2;
        node = (newentry*) realloc ( node , capacity * sizeof(newentry));
        if (node == NULL)
        {
            fprintf(stderr, "Could not allocate enough memory ... Exiting\n");
            exit(EXIT_FAILURE);
        }
    }
    strcpy( node[taskno-1].taskname , taskname );
    strcpy( node[taskno-1].Description , description );
    node[taskno-1].d = d;
    node[taskno-1].m = m;
    node[taskno-1].y = y;
    node[taskno-1].countdays = yeargone(d,m,y);
    node[taskno-1].mark_as_done = false;
    SORT_LIST();
    SERIALIZE_LIST();
    return node;
}
newentry* EDIT_ENTRY(void)
{
    // Ask for a 1-based task index, delegate the actual edit to
    // SWITCH_TO_EDIT, then re-sort and save.  Returns NULL for a bad index.
    int target;
    printf("Enter the index No you want to edit\n");
    scanf("%d", &target);
    getchar();
    if(target < 1 || target > TotalTask)
    {
        printf("Task number does not exist\n");
        system("pause");
        return NULL;
    }
    puts("\n");
    node = SWITCH_TO_EDIT(target);
    SORT_LIST();
    SERIALIZE_LIST();
    return node;
}
// Edit one field (or all fields) of task `indx_to_edit` (1-based).
// Returns the task array, or NULL when a replacement date is invalid.
// Fixes: the date validation in cases 3 and 4 rejected 29/2 on leap years
// and indexed dayNum[m-1] before range-checking m (see ADD_LIST).
newentry* SWITCH_TO_EDIT(int indx_to_edit)
{
    printf("What you want to Edit?\n");
    printf("\t1. Task Name\n");
    printf("\t2. Description\n");
    printf("\t3. Time\n");
    printf("\t4. All\n");
    int switch_case;
    scanf("%d",&switch_case);
    getchar();
    switch(switch_case)
    {
        case 1:
        {
            char taskname[NAMELENGTH];
            printf("Task Name : ");
            fgets(taskname, NAMELENGTH , stdin);
            strcpy(node[indx_to_edit-1].taskname , taskname);
        }
        break;
        case 2:
        {
            char DESCRIPTION[DESCLENGTH];
            printf("Description : ");
            fgets(DESCRIPTION, DESCLENGTH , stdin);
            strcpy(node[indx_to_edit-1].Description , DESCRIPTION);
        }
        break;
        case 3:
        {
            int d, m , y;
            scanf("%d / %d / %d",&d ,&m ,&y);
            getchar();
            // February caps at 29 in leap years, 28 otherwise.
            if(d < 1 || m < 1 || m > 12 || (m == 2 ? d > (isLeapYear(y) ? 29 : 28) : d > dayNum[m-1]))
            {
                printf("Invalid time!\n");
                system("pause");
                return NULL;
            }
            node[indx_to_edit-1].d = d;
            node[indx_to_edit-1].m = m;
            node[indx_to_edit-1].y = y;
            node[indx_to_edit-1].countdays = yeargone(d,m,y);
        }
        break;
        case 4:
        {
            char taskname[NAMELENGTH] , description[DESCLENGTH];
            int d , m , y;
            system("cls");
            printf("Task Name : ");
            fgets(taskname,NAMELENGTH,stdin);
            printf("\nDescription : ");
            fgets(description,DESCLENGTH,stdin);
            printf("Deadline (dd/mm/yyyy): ");
            scanf("%d / %d / %d", &d , &m , &y);
            getchar();
            if(d < 1 || m < 1 || m > 12 || (m == 2 ? d > (isLeapYear(y) ? 29 : 28) : d > dayNum[m-1]))
            {
                printf("Invalid time!\n");
                system("pause");
                return NULL;
            }
            strcpy( node[indx_to_edit-1].taskname , taskname );
            strcpy( node[indx_to_edit-1].Description , description );
            node[indx_to_edit-1].d = d;
            node[indx_to_edit-1].m = m;
            node[indx_to_edit-1].y = y;
            node[indx_to_edit-1].countdays = yeargone(d,m,y);
        }
        break;
        default:
        {
            printf("\n\tInvalid Input!\t\n");
        }
        break;
    }
    return node;
}
newentry* REMOVE_LIST(void)
{
    // Remove a task by its 1-based index using swap-with-last, then re-sort
    // and persist.  Invalid indexes are reported and leave the list intact.
    int target;
    printf("Enter the index No you want to remove\n");
    scanf("%d",&target);
    getchar();
    bool out_of_range = (target > taskno) || (target < 1);
    if(out_of_range)
    {
        printf("Unable to find Task at index no %d\n\n",target);
        printf("Press Any Key to Continue\n");
        getchar();
        return node;
    }
    // Overwrite the removed slot with the final record and shrink the count.
    node [target-1] = node[taskno-1];
    taskno--;
    TotalTask = taskno;
    SORT_LIST();
    SERIALIZE_LIST();
    printf("%d no task successfully removed from your To Do List%c\n\n",target , 3);
    printf("Press Any Key to Continue\n");
    getchar();
    return node;
}
// Print a one-line summary of every task and optionally show one task's
// full details.
// Fixes: `ch` was read uninitialized when the list was empty (the prompt
// branch never ran), and the chosen task number was passed to SHOW_LIST
// without a bounds check.
void SHOW_ALL_LIST (void)
{
    char ch = 'n';   // default: no detail view
    int no;
    system("cls");
    for (int I=0 ; I<taskno ; I++)
    {
        printf("%d.\t",I+1);
        printf("Task Name : %s",node[I].taskname);
        printf("\tTime : %2.2d/%2.2d/%4.4d\n",node[I].d, node[I].m, node[I].y);
    }
    printf("\n");
    if(TotalTask>0)
    {
        printf("Do you want to view any task? (y/n)\n");
        scanf("%c",&ch);
        getchar();
    }
    else
    {
        printf("No entries present currently\n");
        printf("Press Any Key to Continue\n");
        getchar();
    }
    if(ch=='y')
    {
        printf("Enter the no of Task : ");
        scanf("%d",&no);
        getchar();
        if(no >= 1 && no <= taskno)
            SHOW_LIST(no);
        else
            printf("Entry Not Found!\n");
        printf("Press Any Key to Continue\n");
        getchar();
    }
}
void SHOW_LIST(int No_of_Task)
{
printf("\n");
printf("\tTask Name : %s",node[No_of_Task-1].taskname);
printf("\tDescription : %s",node[No_of_Task-1].Description);
printf("\tTime : %2.2d/%2.2d/%4.4d\n",node[No_of_Task-1].d, node[No_of_Task-1].m, node[No_of_Task-1].y);
}
// Mark one task (1-based index) as completed: set its flag and append
// " (Done)" to its name (the name's trailing '\n' from fgets is replaced
// and re-added).
// Fix: the bound check was off by one — `(index-1) > TotalTask` let
// index == TotalTask + 1 through, indexing one past the array.
void Mark_Done(void)
{
    int index;
    printf("\tEnter the Index No of Task you have completed : ");
    scanf("%d",&index);
    getchar();
    if(index > TotalTask || index < 1)
    {
        printf("Entry Not Found!\n");
        return;
    }
    if(node[index-1].mark_as_done)
    {
        printf("This task was already done\n");
        return;
    }
    node[index-1].mark_as_done = true;
    char tempname[NAMELENGTH];
    strcpy(tempname, node[index-1].taskname);
    tempname[strlen(tempname)-1] = 0;     // drop the trailing newline
    strcat(tempname, " (Done)\n");
    strcpy(node[index-1].taskname, tempname);
    printf("\n\t\tTask No %d Marked As Done\n",index);
    printf("\nPress Any Key to Continue\n");
    getchar();
    SERIALIZE_LIST();
}
// Forward declaration: Show_Bar is defined below this function and its
// file-level prototype is commented out, so without this the call relies
// on an implicit declaration (invalid in C99 and later).
void Show_Bar(int completed_task);

// Count the finished tasks and hand the tally to the bar renderer.
void Progress_Bar(void)
{
    int completed_task = 0;
    for (int I = 0 ; I < TotalTask ; I++)
    {
        if (node[I].mark_as_done)
            completed_task++;
    }
    Show_Bar(completed_task);
}
// Draw a 50-character progress bar at a fixed console position:
// solid blocks (219) for completed work, shaded blocks (177) for the rest.
// Fix: guard the ratio when TotalTask == 0 — the original computed 0.0/0.0,
// leaving `progression` undefined.
void Show_Bar (int completed_task)
{
    char a = 177 , b = 219;   // shaded / solid block glyphs (OEM code page)
    double pgsn = 0.0;
    if (TotalTask > 0)
        pgsn = ((double)completed_task / (double)TotalTask) * 50;
    int progression = pgsn;
    gotoxy(39, 19);
    printf("Progression...(%d/%d)",completed_task ,TotalTask);
    gotoxy(24, 20);
    for (int I=0 ; I<50 ; I++)
    {
        if(I<progression)
            printf("%c", b);
        else
            printf("%c", a);
    }
}
void SORT_LIST (void)
{
    // Bubble-sort the tasks by their `countdays` key (earliest deadline
    // first), bailing out early on an already-sorted pass, then persist.
    for ( int pass = 0 ; pass < taskno-1 ; pass++ )
    {
        bool swapped = false;
        for ( int k = 0 ; k < taskno-1-pass ; k++ )
        {
            if(node[k].countdays > node[k+1].countdays)
            {
                newentry held = node[k];
                node[k] = node[k+1];
                node[k+1] = held;
                swapped = true;
            }
        }
        if (!swapped)
            break;
    }
    SERIALIZE_LIST();
}
int isLeapYear(int y)
{
    // Gregorian leap-year rule.  Side effect: patches the shared month-length
    // table so February reflects the given year.  Returns 1 or 0.
    int leap = (y % 400 == 0) || (y % 4 == 0 && y % 100 != 0);
    dayNum[1] = leap ? 29 : 28;
    return leap;
}
int daysGone(int d, int m, int y)
{
    // Day-of-year for d/m/y, counting February as 29 days in leap years.
    // Returns 0 for a month outside 1..12 (matches the original switch,
    // which fell through to no case).
    int leap = isLeapYear(y);   // also refreshes the shared dayNum table
    if (m < 1 || m > 12)
        return 0;
    static const int month_len[11] = {31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30};
    int total = d;
    for (int i = 0 ; i < m - 1 ; i++)
        total += month_len[i];
    if (leap && m > 2)
        total += 1;   // leap day falls before any month after February
    return total;
}
int yeargone (int d, int m, int y)
{
    // Approximate ordering key: day-of-year plus 365 per year since 2000.
    // Leap days between years are deliberately ignored ("not for real" —
    // it only needs to order deadlines consistently).
    int year_offset = (y - 2000) * 365;
    return daysGone(d, m, y) + year_offset;
}
// Find and print every task whose deadline equals the given date.
// Fixes: the old validation rejected 29/2 on leap years and indexed
// dayNum[m-1] before range-checking m (see ADD_LIST).
void SEARCH_ENTRY()
{
    int d, m, y;
    printf("Give the date (dd/mm/yyyy) : ");
    scanf("%d / %d / %d", &d, &m, &y);
    getchar();
    // February caps at 29 in leap years, 28 otherwise; other months use dayNum.
    if(d < 1 || m < 1 || m > 12 || (m == 2 ? d > (isLeapYear(y) ? 29 : 28) : d > dayNum[m-1]))
    {
        printf("Invalid time!\n");
        system("pause");
        return;
    }
    int y_val = yeargone(d, m, y);   // same key the tasks are stored under
    bool flag = 0;
    for(int I = 0 ; I < taskno ; I++)
    {
        if(node[I].countdays == y_val)
        {
            if(flag == 0) printf("ENTRY FOUND\n");
            flag = 1;
            printf("Task name: %s"
                   "Description: %s", node[I].taskname, node[I].Description);
        }
    }
    if(flag == 0) printf("NOT FOUND\n");
    system("pause");
}
// To-do list main loop: allocate the task array, then repeatedly reload the
// saved list, show the menu, and dispatch the chosen action until the user
// picks option 8 (exit).
int todo(void)
{
    node = (newentry*) malloc (capacity * sizeof(newentry));
    int NUMBER;
    // int index = 0;
    while(true)
    {
        system("cls");
        // printf("Choose One:\n");
        // printf("\t1. Add Entry\n");
        // printf("\t2. Edit Entry\n");
        // printf("\t3. Search entry\n");
        // printf("\t4. Remove Entry\n");
        // printf("\t5. Show All List\n");
        // printf("\t6. Mark As Done\n");
        // printf("\t7. Exit To-Do List\n");
        // printf("\n");
        // scanf("%d",&NUMBER);
        // getchar();
        // Reload from disk every iteration so the menu reflects saved state.
        DESERIALIZE_LIST();
        NUMBER = todo_menu();
        // Progress_Bar();
        switch (NUMBER)
        {
            case 1:
            {
                ADD_LIST();
                // index++;
            }
            break;
            case 2:
            {
                EDIT_ENTRY();
            }
            break;
            case 3:
            {
                SEARCH_ENTRY();
            }
            break;
            case 4:
            {
                REMOVE_LIST();
            }
            break;
            case 5:
            {
                SHOW_ALL_LIST();
            }
            break;
            case 6:
            {
                Mark_Done();
            }
            break;
            case 7:
            {
                calendarView();
            }
            break;
            case 8:
            {
                // NOTE(review): returning here skips the free(node) below;
                // the allocation lives until process exit.
                return 0;
            }
            default:
            {
                printf("\n\tInvalid Input!\t\n");
            }
            break;
        }
    }
    free(node);
    return 0;
}
<file_sep>/diary.h
// preprocessor declarations
#define listMaxIndex 50     // max number of list items per diary entry
#define maxListLength 100   // max length of one list item
#define SUBLEN 100 +2
#define DIARY "diaryinfo.bin"
// variables to store file names (doesnt have to be global)
char mainfile[MAXFILENAMELEN];
char listfile[MAXFILENAMELEN];
// global typedefs and structures
// Per-entry metadata stored in DIARY; the body and its list live in
// separate "<n>.txt" / "<n>list.txt" files named after the entry index.
typedef struct _info{
    char subject[SUBLEN];
    int d, m, y;
} info;
// global variables
int databaseLength = 0, databaseCapacity = 4; // main array
int textLength = 0, textCapacity = 50; // main body informations
char* text; // main body
int listLast = 0, listLength[listMaxIndex]; // length of list indx
char list[listMaxIndex][maxListLength]; // list array
enum _color_codes highlight_color, normal_color;
bool highlight_mode = 0;
info* database;
// taken from todolist
// Days per month (February is patched by isLeapYear()).
int dayNum[] = {31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31};
// function prototypes
int isTagged();
void removeTag(int tag_type);
void serializeDiary();
void deserializeDiary();
void addInfo(char subject[SUBLEN], int d, int m, int y);
int removeInfo(int indx);
void listEntries();
void userScan();
void userListScan();
void searchDiary();
bool diarySubstringCheck(char* large, char* small);
void scan_entry(int indx);
void edit_list(int indx);
void add_list();
void remove_list(int indx);
void remove_nth_listend_tag_from_body(int indx, int n);
void print_list_only();
void serialize_list_only(FILE* fp);
bool check_list_endtag(char* s);
void remove_list_endtag(char* s);
int frontTagged(FILE* fp, FILE* ivan);
void readFromFile(int indx);
void readListFromFile(FILE* ivan);
// Diary main loop: menu-driven create ('1'), load ('2'), delete ('3'),
// list ('4'), edit-list ('5'), and search ('6') until quit ('7').
int diary(void)
{
    database = (info*) malloc(databaseCapacity * sizeof(info));
    text = (char*) malloc(textCapacity * sizeof(char));
    normal_color = not_selected;
    highlight_color = selected;
    while(1)
    {
        deserializeDiary();
        char command = diary_menu();
        color(normal_color);
        if(command == '7') break;
        if(command == '1')   // create a new entry
        {
            printf("Today is %2.2d/%2.2d/%4.4d\n", date, month, year);
            printf("Date (format dd/mm/yyyy): ");
            int d, m, y;
            scanf("%d / %d / %d", &d, &m, &y);
            fflush(stdin);
            // NOTE(review): this check rejects 29/2 even on leap years (the
            // final `d > 28` clause fires before the leap case) — confirm intent.
            if(d < 1 || m < 1 || ((m != 2 && d > dayNum[m-1]) || (m == 2 && isLeapYear(y) && d > 29) || (m == 2 && d > 28)) || m > 12)
            {
                printf("Invalid time!\n");
                system("pause");
                continue;
            }
            char subject[SUBLEN] = {};
            printf("Subject: ");
            fgets(subject, SUBLEN, stdin);
            addInfo(subject, d, m, y);
            printf("------------------>> ENTRY NO %d <<--------------------\n", databaseLength);
            serializeDiary();
            userScan();   // interactive editor; fills `text` and `list`
            // Build the "<n>.txt" / "<n>list.txt" file names for this entry.
            mainfile[0] = listfile[0] = 0;
            num_to_string(mainfile, databaseLength-1);
            strcpy(listfile, mainfile);
            strcat(mainfile, ".txt");
            strcat(listfile, "list.txt");
            FILE* fp = fopen(mainfile, "w");
            fprintf(fp,"%s", text);
            fclose(fp);
            fp = fopen(listfile, "w");
            // NOTE(review): when listLast == 0 the line after this loop reads
            // list[-1] — verify userScan() guarantees at least one list slot.
            for(int i = 0 ; i < listLast-1 ; i++)
                fprintf(fp,"%s\n", list[i]);
            fprintf(fp, "%s", list[listLast-1]);
            fclose(fp);
            printf("\n");
        }
        if(command == '2')   // load and display an existing entry
        {
            printf("Give the number to load : ");
            int indx;
            scanf("%d", &indx);
            fflush(stdin);
            if(indx < 1 || indx > databaseLength)
            {
                printf("<ENTRY DOESNT EXIST>\n");
                system("pause");
                continue;
            }
            indx--;   // user-facing numbers are 1-based
            printf("Date: %2.2d/%2.2d/%4.4d\n", database[indx].d, database[indx].m, database[indx].y);
            printf("Subject: %s", database[indx].subject);
            printf("------------------------------------------------\n");
            readFromFile(indx);
            printf("\n");
        }
        if(command == '3')   // delete an entry (swap-with-last, see removeInfo)
        {
            printf("Give the number to delete : ");
            int indx;
            scanf("%d", &indx);
            fflush(stdin);
            if(indx < 1 || indx > databaseLength)
            {
                printf("<ENTRY DOESNT EXIST>\n");
                system("pause");
                continue;
            }
            indx--;
            removeInfo(indx);
            printf("<SUCCESSFULLY REMOVED THIS ENTRY>\n");
        }
        if(command == '4')
            listEntries();
        if(command == '5')   // edit an entry's list
        {
            printf("Which entry would you like to edit? Ans: ");
            int indx;
            scanf("%d", &indx);
            fflush(stdin);
            if(indx < 1 || indx > databaseLength)
            {
                printf("<ENTRY DOESNT EXIST>\n");
                system("pause");
                continue;
            }
            indx--;
            edit_list(indx);
        }
        if(command == '6')
            searchDiary();
        system("pause");
    }
    free(text);
    free(database);
    return 0;
}
// Classify the markup tag, if any, that the body text currently ends with:
// 1 = "<h>", 2 = "</h>", 3 = "<l>", 4 = "</l>", 0 = no tag.
// Fix: guard the lookbacks so a body shorter than the tag being tested
// never produces a negative index into `text`.
int isTagged()
{
    char checker[5] = {};
    if(textLength >= 3)
    {
        for(int i = textLength-3 ; i < textLength ; i++)
            checker[i-textLength+3] = text[i];
        if(!strcmp(checker, "<h>"))
            return 1;
        if(!strcmp(checker, "<l>"))
            return 3;
    }
    if(textLength >= 4)
    {
        for(int i = textLength-4 ; i < textLength ; i++)
            checker[i-textLength+4] = text[i];
        if(!strcmp(checker, "</h>"))
            return 2;
        if(!strcmp(checker, "</l>"))
            return 4;
    }
    return 0;
}
// Strip from the end of the body the tag that isTagged() just reported.
// Per isTagged's codes, opening tags "<h>"/"<l>" (types 1/3) are 3 chars
// and closing tags "</h>"/"</l>" (types 2/4) are 4 chars.
// Fix: the old grouping (1,2 -> 3 chars; 3,4 -> 4 chars) removed the wrong
// length for type 2 ("</h>" is 4 chars) and type 3 ("<l>" is 3 chars).
void removeTag(int tag_type)
{
    int tag_len = 0;
    if(tag_type == 1 || tag_type == 3)
        tag_len = 3;
    else if(tag_type == 2 || tag_type == 4)
        tag_len = 4;
    for(int i = textLength - tag_len ; i < textLength ; i++)
        text[i] = 0;
    textLength -= tag_len;
}
// Persist every entry's metadata record to the DIARY binary file.
// Fix: guard against fopen failure — the original dereferenced a NULL
// FILE* via fwrite/fclose when the file could not be created.
void serializeDiary()
{
    FILE* fp = fopen(DIARY, "wb");
    if(fp == NULL)
        return;   // best-effort save
    for(int i = 0 ; i < databaseLength ; i++)
        fwrite(&database[i], sizeof(database[0]), 1, fp);
    fclose(fp);
}
void deserializeDiary()
{
    // Rebuild the in-memory index of diary entries from the metadata file,
    // resetting the counters first.  A missing file leaves the index empty.
    databaseCapacity = 4;
    databaseLength = 0;
    FILE* fp = fopen(DIARY, "rb");
    if(fp == NULL)
        return;
    info record;
    while(fread(&record, sizeof(record), 1, fp) == 1)
        addInfo(record.subject, record.d, record.m, record.y);
    fclose(fp);
}
// Append one entry's metadata to the index, doubling the capacity when full.
// Fix: check the realloc result (its siblings InsertUser / RemoveUser in
// this project abort on allocation failure; previously a NULL here would
// crash on the next write instead).
void addInfo(char subject[SUBLEN], int d, int m, int y)
{
    strcpy(database[databaseLength].subject, subject);
    database[databaseLength].d = d;
    database[databaseLength].m = m;
    database[databaseLength].y = y;
    databaseLength++;
    if(databaseLength == databaseCapacity)
    {
        databaseCapacity *= 2;
        database = (info*) realloc(database, databaseCapacity * sizeof(info));
        if(database == NULL)
        {
            fprintf(stderr, "Could not allocate enough memory...Exiting\n");
            exit(EXIT_FAILURE);
        }
    }
}
int removeInfo(int indx)
{
    // Delete entry `indx` using swap-with-last: remove its two files, rename
    // the final entry's files onto its names, copy the final metadata record
    // into its slot, and persist the shrunken index.  Always returns 0.
    databaseLength--;
    char entry_name[MAXFILENAMELEN] = {};
    char list_name[MAXFILENAMELEN] = {};
    num_to_string(entry_name, indx);
    strcpy(list_name, entry_name);
    strcat(entry_name, ".txt");
    strcat(list_name, "list.txt");
    remove(entry_name);
    remove(list_name);
    // Names of the (former) last entry's files, which now fill the gap.
    char moved_main[MAXFILENAMELEN] = {};
    char moved_list[MAXFILENAMELEN] = {};
    num_to_string(moved_main, databaseLength);
    strcpy(moved_list, moved_main);
    strcat(moved_main, ".txt");
    strcat(moved_list, "list.txt");
    rename(moved_main, entry_name);
    rename(moved_list, list_name);
    database[indx] = database[databaseLength];
    serializeDiary();
    return 0;
}
// Search all diary entries with three optional filters: a date range, a
// subject substring, and a body/list substring.  A negative date disables
// the range filter; a blank (newline-only) string disables a text filter.
// Prints the entry number, date, and subject of every match.
void searchDiary()
{
    printf("Now we will insert the date range.\n "
           "Input a negative value to disable this filter\n");
    printf("Input starting date (dd/mm/yyyy): ");
    int sd, sm, sy; // start day-month-year
    int ed, em, ey; // ending day-month-year
    bool dateFilter, subFilter, bodyFilter;
    scanf("%d / %d / %d", &sd, &sm, &sy);
    fflush(stdin);
    // NOTE(review): this check rejects 29/2 on leap years (trailing `d > 28`
    // clause), and a negative sm reaches dayNum[sm-1] with a negative index
    // before the disable-branch below — confirm intended input contract.
    if(sd == 0 || sm == 0 || ((sm != 2 && sd > dayNum[sm-1]) || (sm == 2 && isLeapYear(sy) && sd > 29) || (sm == 2 && sd > 28)) || sm > 12)
    {
        printf("Invalid time!\n");
        system("pause");
        return;
    }
    if(sd > 0 && sm > 0 && sy > 0)
    {
        printf("Input ending date (dd/mm/yyyy): ");
        scanf("%d / %d / %d", &ed, &em, &ey);
        fflush(stdin);
        if(ed < 1 || em < 1 || ((em != 2 && ed > dayNum[em-1]) || (em == 2 && isLeapYear(ey) && ed > 29) || (em == 2 && ed > 28)) || em > 12)
        {
            printf("Invalid time!\n");
            system("pause");
            return;
        }
        dateFilter = 1;
    }
    else dateFilter = 0;
    char sub_search[SUBLEN] = {}, body_search[SUBLEN * 5] = {};
    printf("Enter Subject Filter.\nBlank input disables this filter.\n");
    printf("Subject (can be a substring) : ");
    fgets(sub_search, SUBLEN, stdin);
    fflush(stdin);
    printf("Enter Body Filter.\nBlank input disables this filter.\n");
    printf("Body substring: ");
    fgets(body_search, 5 * SUBLEN, stdin);
    fflush(stdin);
    int* results = (int*) calloc(databaseLength +1 , sizeof(int));
    int r = 0;   // number of matches collected so far
    system("cls");
    printf("<SEARCH STARTED!>\n"
           "Please wait patiently as this can take a moment\n\n");
    int start;
    int end;
    if(dateFilter)
    {
        // days_gone() returns the 366 sentinel for 29/2; fold it back to
        // day 59 so range comparisons stay monotonic.
        start = days_gone(sm, sd);
        if(start == 366) start = 59;
        end = days_gone(em, ed);
        if(end == 366) end = 59;
    }
    // fgets leaves just "\n" for blank input, hence the length-1 test.
    if(strlen(sub_search) == 1) subFilter = 0;
    else subFilter = 1;
    if(strlen(body_search) == 1) bodyFilter = 0;
    else bodyFilter = 1;
    for(int i = 0 ; i < databaseLength ; i++)
    {
        int current = days_gone(database[i].m, database[i].d);
        if(current == 366) current = 59;
        if(!dateFilter || (database[i].y > sy || (database[i].y == sy && current >= start))
                       && (database[i].y < ey || (database[i].y == ey && current <= end)))
        {
            if(!subFilter || diarySubstringCheck(database[i].subject, sub_search))
            {
                // Only now load the entry body/list from disk (the slow part).
                scan_entry(i);
                bool flag = 0;
                if(!bodyFilter || diarySubstringCheck(text, body_search)) flag = 1;
                for(int j = 0 ; j < listLast && !flag ; j++)
                    if(diarySubstringCheck(list[j], body_search))
                        flag = 1;
                if(flag)
                {
                    results[r] = i;
                    r++;
                }
            }
        }
    }
    printf("FOUND: %d matches\n\n", r);
    for(int i = 0 ; i < r ; i++)
    {
        printf("Entry no : %d\n"
               "Date: %2.2d/%2.2d/%4.4d\n"
               "Subject: %s\n\n", results[i]+1, database[results[i]].d, database[results[i]].m, database[results[i]].y, database[results[i]].subject);
    }
    free(results);
}
// Case-insensitive substring search used by searchDiary().
// Returns true iff `small` occurs in `large`, where BOTH strings are
// compared with newline characters and the diary markup tags
// "<l>", "<h>", "</l>", "</h>" skipped over, so a search string matches
// regardless of highlighting / list formatting in the stored entry.
// `i` indexes the candidate match start in `large`, `j` the position in
// `small`; note that both indices are also advanced inside the skip loops,
// so the outer for-loops and the inner while(1) share state.
bool diarySubstringCheck(char* large, char* small)
{
    int slen = strlen(small);
    int llen = strlen(large);
    for(int i = 0 ; i < llen ; i++)
    {
        for(int j = 0 ; j < slen ; j++)
        {
            char s = *(small+j);
            char l = *(large+i+j);
            // skip-loop: fast-forwards both cursors over newlines and tags
            // before the character comparison below
            while(1)
            {
                if(j == slen)
                    return true;  // consumed all of `small` -> match
                if(i == llen)
                    return false; // ran off the end of `large`
                if(!(s == '<' || l == '<' || s == '\n' || l == '\n'))
                    break;        // nothing to skip; fall through to compare
                // skip newlines
                while(s == '\n' && j < slen)
                {
                    j++;
                    s = *(small+j);
                }
                while(l == '\n' && i < llen)
                {
                    i++;
                    l = *(large+i+j);
                }
                // skip tags
                while(s == '<' && j < slen)
                {
                    if(*(small+j+1) == 'l' && *(small+j+2) == '>') j += 3;
                    if(*(small+j+1) == 'h' && *(small+j+2) == '>') j += 3;
                    if(*(small+j+1) == '/' && *(small+j+2) == 'l' && *(small+j+3) == '>') j += 4;
                    if(*(small+j+1) == '/' && *(small+j+2) == 'h' && *(small+j+3) == '>') j += 4;
                    if(j >= slen) return true;
                    s = *(small+j);
                }
                // NOTE(review): `i` is advanced here but the read offset stays
                // i+j, i.e. the tag skip moves the match window, not just the
                // cursor — presumably intended, but confirm against test data.
                while(l == '<' && i < llen)
                {
                    if(*(large+j+i+1) == 'l' && *(large+j+i+2) == '>') i += 3;
                    if(*(large+j+i+1) == 'h' && *(large+j+i+2) == '>') i += 3;
                    if(*(large+j+i+1) == '/' && *(large+j+i+2) == 'l' && *(large+j+i+3) == '>') i += 4;
                    if(*(large+j+i+1) == '/' && *(large+j+i+2) == 'h' && *(large+j+i+3) == '>') i += 4;
                    if(i >= llen) return false;
                    l = *(large+i+j);
                }
            }
            // case-insensitive compare of the current pair
            if(isalpha(s)) s = tolower(s);
            if(isalpha(l)) l = tolower(l);
            if(l != s)
                break;            // mismatch: restart at the next `i`
            if(j >= slen-1)
                return true;      // matched the final character of `small`
        }
    }
    return false;
}
// Load diary entry `indx` from disk into the module's global buffers:
// the body text into `text`/`textLength` (grown geometrically) and the
// list lines into `list`/`listLength`/`listLast`.
// Fixes: `text` is now NUL-terminated (searchDiary() runs strlen()/substring
// checks on it, which was UB on an unterminated buffer), and a missing file
// no longer dereferences a NULL FILE*.
void scan_entry(int indx)
{
    char ch;
    textCapacity = 50;
    text = (char*) realloc(text, textCapacity * sizeof(char));
    textLength = 0;
    listLast = 0;
    // opening the file: names are "<indx>.txt" and "<indx>list.txt"
    mainfile[0] = listfile[0] = 0;
    num_to_string(mainfile, indx);
    num_to_string(listfile, indx);
    strcat(mainfile, ".txt");
    strcat(listfile, "list.txt");
    FILE* fp = fopen(mainfile, "r");
    if(fp == NULL)
    {
        text[0] = 0; // entry file missing: leave buffers empty but valid
        return;
    }
    while(1)
    {
        if(fscanf(fp, "%c", &ch) == EOF) break;
        if(textLength == textCapacity - 1)
        {
            textCapacity *= 2;
            text = (char*) realloc(text, textCapacity * sizeof(char));
        }
        text[textLength] = ch;
        textLength++;
    }
    text[textLength] = 0; // was missing: callers strlen() this buffer
    fclose(fp);
    fp = fopen(listfile, "r");
    memset(listLength, 0, listMaxIndex * sizeof(int));
    if(fp == NULL) return; // no list file for this entry
    while(1)
    {
        if(fgets(list[listLast], maxListLength, fp) == NULL) break;
        listLength[listLast] = strlen(list[listLast]);
        listLast++;
    }
    fclose(fp);
}
// Interactive editor for the list attached to diary entry `indx`:
// loads "<indx>list.txt" into the global list buffers, then loops letting
// the user Add or Remove list points until escape, rewriting the file after
// every action.
void edit_list(int indx)
{
    listfile[0] = 0;
    num_to_string(listfile, indx);
    strcat(listfile, "list.txt");
    FILE* fp = fopen(listfile, "r"); // NOTE(review): fopen result unchecked — crashes if the list file is missing
    listLast = 0;
    memset(listLength, 0, listMaxIndex * sizeof(int));
    bool end_of_file = 0;
    // read the file line by line into list[]/listLength[]
    while(!end_of_file)
    {
        while(!end_of_file)
        {
            char ch;
            if(fscanf(fp, "%c", &ch) == EOF)
            {
                end_of_file = 1;
                break;
            }
            if(ch == '\n') break; // end of this list line
            list[listLast][listLength[listLast]] = ch;
            listLength[listLast]++;
            list[listLast][listLength[listLast]] = 0;
        }
        listLast++; // NOTE(review): also increments once for the EOF pass, so listLast ends one past the last real line
    }
    fclose(fp);
    if(listLast == 1) // only the EOF pass ran -> the file had no list lines
    {
        printf("Sorry, this entry does not have any list to edit\n"
               "You can only edit list if the entry has atleast one list point\n");
        return;
    }
    char input = 0;
    while(input != escape)
    {
        // the file is truncated ("w") and rewritten on every menu iteration,
        // so the on-disk list always mirrors the in-memory one
        fp = fopen(listfile, "w");
        system("cls");
        printf("Press esc to exit\n\n");
        printf("CURRENT LIST:-\n");
        print_list_only();
        printf("\n\nWhat would you like to do? Add or remove? (A/R) : ");
        input = _getch(); // single keypress; only uppercase 'A'/'R' are acted on
        printf("%c \n", input);
        if(input == 'A') add_list();
        else if(input == 'R') remove_list(indx);
        serialize_list_only(fp);
        fclose(fp);
    }
}
// Insert a new list point. The user gives a fractional position (e.g. 1.5
// to insert between points 1 and 2); the new line is appended at the end of
// the arrays and then bubbled down into place by repeated swaps.
// If the insertion is at the very end, the "</l>" closing tag migrates from
// the previous last line onto the new one.
void add_list()
{
    printf("Give a float value that is inbetween the two list numbers\n"
           "where you want your new list entry to be in\n"
           "(For example to add a list between 1 and 2, give 1.5) : ");
    double input;
    scanf("%lf", &input);
    fflush(stdin);
    input -= 1; // convert 1-based user position to 0-based slot
    printf("Give the new list entry: \n");
    fgets(list[listLast], maxListLength, stdin);
    list[listLast][strlen(list[listLast]) -1] = 0; //discarding the \n
    if(input + 1 > listLast) // inserting past the current tail
    {
        strcat(list[listLast], "</l>");
        remove_list_endtag(list[listLast-1]); // old tail loses the closing tag
    }
    listLength[listLast] = strlen(list[listLast]);
    listLast++;
    // bubble the new entry (now at the end) down to its target slot
    for(int i = listLast-1 ; i > input+1 && i > 0 ; i--)
    {
        swap_string(list[i], list[i-1]);
        int temp = listLength[i];
        listLength[i] = listLength[i-1];
        listLength[i-1] = temp;
    }
}
// Remove one list point (1-based user input) from entry `indx`.
// Because exactly one line of each list block carries the "</l>" closing
// tag, removing the tagged line must move the tag somewhere: onto a
// neighbouring untagged line, or — if the neighbours are already tagged
// (i.e. this was a one-line list block) — the matching "</l>" is deleted
// from the entry's body file instead.
void remove_list(int indx)
{
    printf("Give the list number you want to remove: ");
    int input;
    scanf("%d", &input);
    fflush(stdin);
    input--; // to 0-based index
    if(input < 0 || input >= listLast)
    {
        printf("<LIST ELEMENT DOESNT EXIST>\n");
        system("pause");
        return;
    }
    // first - last - middle
    if(input == 0 && check_list_endtag(list[input]))
    {
        // removing the first line, which carries the tag
        if(input != listLast-1 && !check_list_endtag(list[input+1]))
        {
            strcat(list[input+1], "</l>");
            listLength[input+1] += 4;
        }
        else remove_nth_listend_tag_from_body(indx , 1); // single-line block: drop the tag from the body file
    }
    else if(input == listLast-1 && check_list_endtag(list[input]))
    {
        // removing the tagged last line
        if(!check_list_endtag(list[input-1]))
        {
            strcat(list[input-1], "</l>");
            listLength[input-1] += 4;
        }
        else
        {
            // count which list block this tag belongs to before deleting it
            int count = 0;
            for(int i = 0 ; i <= input ; i++)
                if(check_list_endtag(list[i])) count++;
            remove_nth_listend_tag_from_body(indx, count);
        }
    }
    else if(check_list_endtag(list[input]))
    {
        // removing a tagged middle line: prefer pushing the tag forward
        if(!check_list_endtag(list[input+1]))
        {
            strcat(list[input+1], "</l>");
            listLength[input+1] += 4;
        }
        else if(!check_list_endtag(list[input-1]))
        {
            strcat(list[input-1], "</l>");
            listLength[input-1] += 4;
        }
        else
        {
            int count = 0;
            for(int i = 0 ; i <= input ; i++)
                if(check_list_endtag(list[i])) count++;
            remove_nth_listend_tag_from_body(indx, count);
        }
    }
    // erase the removed slot and shift everything after it up by one
    listLength[input] = 0;
    list[input][0] = 0;
    listLast--;
    for(int i = input ; i < listLast ; i++)
    {
        swap_string(list[i], list[i+1]);
        int temp = listLength[i];
        listLength[i] = listLength[i+1];
        listLength[i+1] = temp;
    }
}
// Rewrite entry `indx`'s body file ("<indx>.txt") with the nth list block's
// 3-byte leading tag stripped: lines are copied through the global `text`
// buffer, counting lines that start with "<l>", and while the counter equals
// `n` the first three characters of each line are dropped.
// NOTE(review): the stripping stays active for every line until the NEXT
// "<l>" line bumps the counter past n — confirm this matches the on-disk
// format (one line per list block).
// Fixes: the text buffer was doubled only ONCE per line, overflowing for
// lines longer than the remaining capacity; the in-place shift read up to
// three bytes past the line's terminator; fopen results were unchecked.
void remove_nth_listend_tag_from_body(int indx, int n)
{
    mainfile[0] = 0;
    num_to_string(mainfile, indx);
    strcat(mainfile, ".txt");
    FILE* fp = fopen(mainfile, "r");
    if(fp == NULL) return; // body file missing: nothing to rewrite
    int count = 0;
    textCapacity = 50;
    text = (char*) realloc(text, textCapacity * sizeof(char));
    text[0] = 0;
    textLength = 0;
    while(1)
    {
        char temp[500] = {};
        int templen;
        if(fgets(temp, 500, fp) == NULL) break;
        templen = strlen(temp);
        if(templen >= 3 && temp[0] == '<' && temp[1] == 'l' && temp[2] == '>') count++;
        if(count == n && templen >= 3)
        {
            // drop the leading 3-byte tag; memmove also copies the terminator
            memmove(temp, temp + 3, (size_t)(templen - 3) + 1);
            templen -= 3;
        }
        while(templen + textLength >= textCapacity) // keep doubling until the line fits
        {
            textCapacity *= 2;
            text = (char*) realloc(text, textCapacity * sizeof(char));
        }
        textLength += templen;
        strcat(text, temp);
    }
    fclose(fp);
    fp = fopen(mainfile, "w");
    if(fp != NULL)
    {
        fprintf(fp, "%s", text);
        fclose(fp);
    }
    textLength = 0;
}
// Print the in-memory list with 1-based numbering. Lines carrying the
// trailing "</l>" marker are printed with backspace/space sequences that
// visually erase the tag from the console (the width of the erase depends
// on whether the line number is one or two digits).
void print_list_only()// remove tags
{
    for(int i = 0 ; i < listLast ; i++)
    {
        int num = i + 1;
        if(!check_list_endtag(list[i]))
        {
            printf("%d. %s\n", num, list[i]);
            continue;
        }
        if(num < 10)
            printf("%d. %s\b\b\b\b \b\b\b\b\n", num, list[i]);
        else
            printf("%d. %s\b\b\b\b\b \b\b\b\b\b\n", num, list[i]);
    }
}
// Write every in-memory list line to `fp`, newline-separated, with no
// trailing newline after the last line.
// Fix: when the list is empty (listLast == 0) the final fprintf indexed
// list[-1] — now it simply writes nothing.
void serialize_list_only(FILE* fp)
{
    if(listLast <= 0) return; // empty list: avoid list[-1]
    for(int i = 0 ; i < listLast-1 ; i++)
        fprintf(fp, "%s\n", list[i]);
    fprintf(fp, "%s", list[listLast-1]);
}
// Return 1 iff the string `s` ends with the literal closing tag "</l>".
// Fix: strings shorter than 4 characters previously indexed s[len-4]
// with a negative offset (out-of-bounds read); they now return 0.
bool check_list_endtag(char* s)
{
    int len = strlen(s);
    if(len < 4) return 0; // too short to hold "</l>"
    if(s[len-4] == '<' && s[len-3] == '/' && s[len-2] == 'l' && s[len-1] == '>')
        return 1;
    return 0;
}
// Strip a trailing "</l>" tag by truncating the last four characters of `s`.
// (Callers are expected to have verified the tag with check_list_endtag().)
// Fix: strings shorter than 4 characters previously wrote through s[len-4]
// with a negative offset; they are now left untouched.
void remove_list_endtag(char* s)
{
    int len = strlen(s);
    if(len < 4) return; // nothing to strip (was an out-of-bounds write)
    s[len-4] = 0;
}
// Print a numbered summary (date + subject) of every diary entry in the
// global database, or a notice when the database is empty.
void listEntries()
{
    if(databaseLength == 0)
    {
        printf("THERE ARE NO ENTRIES!\n");
        return;
    }
    printf("The following entries are present:\n\n");
    for(int idx = 0 ; idx < databaseLength ; idx++)
        printf("Entry no %d\n"
               "Date: %2.2d/%2.2d/%4.4d\n"
               "Subject: %s\n", idx+1, database[idx].d, database[idx].m, database[idx].y, database[idx].subject);
}
// Interactive editor loop for a diary entry body. Reads raw keypresses
// until escape, maintaining the global `text` buffer (grown geometrically)
// and embedding markup as the user toggles modes:
//   backspace     — delete last char (also unwinding a just-typed tag),
//   stilde (~x7E) — toggle highlight mode, emitting "<h>" / "</h>",
//   tilde  (x60)  — start a list block ("<l>") and hand off to userListScan(),
//   enter         — stored as '\n',
//   anything else — echoed and appended.
void userScan()
{
    char ch;
    textCapacity = 50;
    text = (char*) realloc(text, textCapacity * sizeof(char));
    textLength = 0;
    listLast = 0;
    memset(listLength, 0, listMaxIndex * sizeof(int));
    int tag;
    while((ch = _getch()) != escape)
    {
        if(ch == backspace)
        {
            tag = isTagged(); // does the buffer currently end in a tag?
            if(tag == 1 || tag == 2)
            {
                // deleting across a highlight tag flips the mode back
                highlight_mode = !highlight_mode;
                if(highlight_mode)
                    color(highlight_color);
                else color(normal_color);
            }
            removeTag(tag); // incase there is a tag
            printf("\b \b");
            if(textLength != 0)
            {
                textLength--;
                text[textLength] = 0;
            }
        }
        else if(ch == stilde) // pressed highlight
        {
            highlight_mode = !highlight_mode;
            tag = isTagged();
            if(tag == 1) // if someone spammed highlightmode
                removeTag(tag);
            else if(highlight_mode == 1)
            {
                // NOTE(review): tags are strcat'ed without a capacity check;
                // could overflow when textLength is near textCapacity — confirm
                strcat(text, "<h>");
                textLength += 3;
            }
            else
            {
                strcat(text, "</h>");
                textLength += 4;
            }
            if(highlight_mode)
                color(highlight_color);
            else color(normal_color);
        }
        else if(ch == tilde && listLast < listMaxIndex) // pressed list mode
        {
            // list blocks always start on their own line
            if(text[textLength-1] == '\n')
            {
                strcat(text, "<l>");
                textLength += 3;
            }
            else
            {
                strcat(text, "\n<l>");
                printf("\n");
                textLength += 4;
            }
            int temp = listLast;
            color(highlight_color);
            printf("%d. ", listLast+1);
            userListScan();
            if(temp == listLast)
                removeTag(isTagged()); // user typed nothing: undo the "<l>"
        }
        else
        {
            if(ch == enter)
                ch = '\n';
            printf("%c", ch);
            text[textLength] = ch;
            textLength++;
            text[textLength] = 0;
            if(textLength == textCapacity-1)
            {
                textCapacity *= 2;
                text = (char*) realloc(text, textCapacity * sizeof(char));
            }
        }
    }
    color(normal_color);
    highlight_mode = 0;
}
// Sub-editor for a list block, entered from userScan(). Reads keypresses
// into list[listLast] until the user presses tilde again (or overflows the
// list capacity): backspace deletes within the current point, enter starts
// the next numbered point. On exit the prompt line is visually erased with
// backspaces and the last point receives the "</l>" closing tag.
void userListScan()
{
    char ch;
    bool flag = 0; // did the user type anything at all?
    while((ch = _getch()) != tilde)
    {
        flag = 1;
        if(ch == backspace)
        {
            if(listLength[listLast] != 0)
            {
                printf("\b \b");
                listLength[listLast]--;
                list[listLast][listLength[listLast]] = 0;
            }
        }
        else if(ch == enter && listLast < listMaxIndex)
        {
            // start the next list point
            listLast++;
            printf("\n%d. ", listLast+1);
        }
        else if(ch == enter)
        {
            // capacity reached: close the block
            listLast++;
            break;
        }
        else
        {
            printf("%c", ch);
            list[listLast][listLength[listLast]] = ch;
            listLength[listLast]++;
            list[listLast][listLength[listLast]] = 0;
            if(listLength[listLast] == maxListLength) ch = enter; // NOTE(review): sets ch but the loop re-reads it next iteration — confirm intent
        }
    }
    // erase the dangling "n. " prompt from the console
    for(int i = 0 ; i < maxListLength ; i++)
        printf("\b");
    for(int i = 0 ; i < maxListLength ; i++)
        printf(" ");
    for(int i = 0 ; i < maxListLength ; i++)
        printf("\b");
    if(listLast < listMaxIndex) listLength[listLast] = 0;
    if(flag == 0) return; // nothing typed: caller will remove the "<l>" tag
    strcat(list[listLast-1], "</l>");
    listLength[listLast-1] += 4;
    if(highlight_mode == 0)
        color(normal_color);
}
// Called by readFromFile() right after it has consumed a '<' from `fp`.
// Looks ahead to classify the tag: "<h>"/"</h>" toggle highlight colouring,
// "<l>" hands list rendering to readListFromFile() (which reads from `ivan`,
// the list file). Returns 1 when a tag was recognised and handled, else 0.
// Fix: on an unrecognised sequence the stream was rewound by FOUR bytes,
// which re-queued the caller's already-consumed '<' and made readFromFile()
// re-enter this function forever whenever the text contained a literal '<'.
// Only the three look-ahead bytes are rewound now.
int frontTagged(FILE* fp, FILE* ivan)
{
    char tag[5] = "<"; // remaining bytes zero-initialised
    for(int i = 1 ; i < 3 ; i++)
        fscanf(fp, "%c", &tag[i]);
    if(!strcmp(tag, "<h>"))
    {
        highlight_mode = 1;
        color(highlight_color);
        return 1;
    }
    else if(!strcmp(tag, "<l>"))
    {
        readListFromFile(ivan);
        return 1;
    }
    fscanf(fp, "%c", &tag[3]); // need a 4th byte to recognise "</h>"
    if(!strcmp(tag, "</h>"))
    {
        highlight_mode = 0;
        color(normal_color);
        return 1;
    }
    // Not a tag: push back only the three look-ahead characters (was -4,
    // which re-read the '<' and looped forever).
    fseek(fp, -3, SEEK_CUR);
    return 0;
}
// Render diary entry `indx` to the console while loading it: the body is
// streamed from "<indx>.txt" into the global `text` buffer, with '<'
// characters dispatched to frontTagged() so markup controls colouring and
// list rendering (lists come from the parallel "<indx>list.txt", `ivan`).
void readFromFile(int indx)
{
    listLast = 0;
    mainfile[0] = listfile[0] = 0;
    num_to_string(mainfile, indx);
    num_to_string(listfile, indx);
    strcat(mainfile, ".txt");
    strcat(listfile, "list.txt");
    FILE* fp = fopen(mainfile, "r");   // NOTE(review): fopen results unchecked — crashes if the entry files are missing
    FILE* ivan = fopen(listfile, "r");
    char ch;
    textCapacity = 50;
    text = (char*) realloc(text, textCapacity * sizeof(char));
    textLength = 0;
    memset(listLength, 0, listMaxIndex * sizeof(int));
    while(1)
    {
        if(fscanf(fp, "%c", &ch) == EOF) break;
        if(ch == '<')
            frontTagged(fp, ivan); // markup dispatch; return value intentionally ignored
        else
        {
            if(textLength == textCapacity - 1)
            {
                textCapacity *= 2;
                text = (char*) realloc(text, textCapacity * sizeof(char));
            }
            printf("%c", ch); // echo while loading
            text[textLength] = ch;
            textLength++;
        }
    }
    text[textLength] = 0;
    fclose(fp);
    fclose(ivan);
}
// Render one list block from the list file `ivan`: lines are read into the
// global list buffers and echoed with 1-based numbering until a line ending
// in "</l>" closes the block (that line is printed with backspaces erasing
// the tag from the console).
// Fixes: EOF previously only broke the inner character loop, so a file
// missing its "</l>" terminator spun forever while overrunning list[];
// lines shorter than 4 characters indexed the buffer at negative offsets
// during the closing-tag test.
void readListFromFile(FILE* ivan)
{
    color(highlight_color);
    bool eof = 0;
    while(1)
    {
        while(1)
        {
            char ch;
            if(fscanf(ivan, "%c", &ch) == EOF)
            {
                eof = 1; // remember EOF so a malformed file cannot loop forever
                break;
            }
            if(ch == '\n') break;
            list[listLast][listLength[listLast]] = ch;
            listLength[listLast]++;
            list[listLast][listLength[listLast]] = 0;
        }
        int len = listLength[listLast];
        // closing line iff it ends in "</l>" (guarded against short lines)
        bool closing = len >= 4
            && list[listLast][len - 1] == '>'
            && list[listLast][len - 2] == 'l'
            && list[listLast][len - 3] == '/'
            && list[listLast][len - 4] == '<';
        listLast++;
        if(closing)
        {
            // backspace sequence erases the "</l>" tag from the console
            if(listLast < 10)
                printf("%d. %s\b\b\b\b \b\b\b\b\n", listLast, list[listLast-1]);
            else
                printf("%d. %s\b\b\b\b\b \b\b\b\b\b\n", listLast, list[listLast-1]);
            if(highlight_mode == 0)
                color(normal_color);
            break;
        }
        else
        {
            printf("%d. %s\n", listLast, list[listLast-1]);
        }
        if(eof) break; // file ended without "</l>": stop instead of spinning
    }
}
// swap between modes frequently
// among us sus- text art
// finish these following functions one by one
// use memset to reset the listlength!
// backspace to previous line
// days gone used in the search (fix the leap year)
<file_sep>/Menu.h
// preprocessor macros
#define SETTINGS "settings.txt" // on-disk file persisting the dark-mode flag
// global variables
bool dark_mode;           // current theme: nonzero = dark, 0 = light
char* dark_bg = "COLOR 2";  // cmd.exe COLOR command for the dark palette
char* white_bg = "COLOR B7"; // cmd.exe COLOR command for the light palette
int todays_event, next_event; // indices into special_days[] (day-of-year keyed)
// console attribute values for SetConsoleTextAttribute(); the *_on_white
// entries start at 113 to encode the light background in the high nibble
enum _color_codes{
    black_on_black, indigo_on_black, green_on_black,
    blue_on_black, red_on_black, purple_on_black,
    yellow_on_black, white_on_black, grey_on_black,
    indigo_on_white = 113, green_on_white, blue_on_white,
    red_on_white, purple_on_white, yellow_on_white,
    grey_on_white, black_on_white, white_theme = 188, white_selected,
    white_not_selected = 176
};
// key codes returned by _getch(); up/down arrow values follow the 0/-32 prefix byte
enum _control_characters{
    backspace = 8, enter = 13, escape = 27, tilde = 96, stilde = 126,
    up_arrow = 72, down_arrow = 80
};
enum _color_codes selected;     // attribute for the highlighted menu row
enum _color_codes not_selected; // attribute for the other menu rows
enum _color_codes theme;        // attribute for banners / ASCII art
// function prototypes
void color(int color);
void gotoxy(int x, int y);
void print_theme();
void start_up();
char login_menu();
char main_menu();
char password_manager_menu();
char diary_menu();
int todo_menu();
void load_settings();
void save_settings();
void toggle_darkmode();
void OpenReadMeFile();
void cheat_code(char ch); // :)
// taken from todolist
void Progress_Bar(void);
void Show_Bar (int);
// function definitions
void color(int color)
{
SetConsoleTextAttribute(GetStdHandle(STD_OUTPUT_HANDLE), color);
}
// Move the console cursor to column `x`, row `y` (0-based) using the
// Win32 console API.
void gotoxy(int x, int y)
{
    COORD pos;
    pos.X = x;
    pos.Y = y;
    SetConsoleCursorPosition(GetStdHandle(STD_OUTPUT_HANDLE), pos);
}
// Clear the screen and draw the common menu chrome: the "LIBELLO" banner in
// the theme colour, today's / the next special event (from special_days[],
// indexed by day-of-year; 366 is the leap-day sentinel) and the launch date.
// Leaves the console colour set to `selected` for the caller's menu drawing.
void print_theme()
{
    system("cls");
    color(theme);
    printf("\t\t _ _________ ______ _______ _ _ _______ \n"
    "\t\t( \\ \\__ __/( ___\\ ( ____ \\( \\ ( \\ ( ___ )\n"
    "\t\t| ( ) ( | ( ) )| ( \\/| ( | ( | ( ) |\n"
    "\t\t| | | | | (__/ / | (__ | | | | | | | |\n"
    "\t\t| | | | | __ ( | __) | | | | | | | |\n"
    "\t\t| | | | | ( \\ \\ | ( | | | | | | | |\n"
    "\t\t| (____/\\___) (___| )___) )| (____/\\| (____/\\| (____/\\| (___) |\n"
    "\t\t(_______/\\_______/|/ \\___/ (_______/(_______/(_______/(_______)\n");
    gotoxy(2, 22);
    if(special_days[todays_event][0] != 0)
        printf("Today is: %s", special_days[todays_event]);
    gotoxy(2, 23);
    // NOTE(review): the 366 branch hard-codes event 67 / "8 days" — presumably
    // the event following Feb 29; confirm against the special_days table.
    if(todays_event == 366)
        printf("Next event is: %s in 8 days", special_days[67]);
    else if(next_event != 366)
        printf("Next event is: %s in %d days", special_days[next_event], next_event-todays_event);
    gotoxy(2, 24);
    printf("The application was openned at %d/%d/%d", date, month, year); // NOTE: "openned" typo is user-visible output; left as-is here
    color(selected);
}
// Application splash screen: loads persisted settings (which also applies
// the colour palette), draws the welcome banners, computes today's and the
// next upcoming special event, and waits for the user to press enter.
void start_up()
{
    load_settings();
    color(theme);
    printf("\t _ _ _ _ _ _ _ _ _ \n"
    "\t| | | | | | | | | | (_)| | | || | \n"
    "\t| | | | ___ | | ___ ___ _ __ ___ ___ | |__ ___ | | _ | |__ ___ | || | ___ \n"
    "\t| |/\\| | / __\\| | / __| / _ \\ | '_ ` _ \\ / _ \\ | __| / _ \\ | | | || '_ \\ / _ \\| || | / _ \\ \n"
    "\t\\ /\\ /| __/| || (__ | (_) || | | | | || __/ | |_ | (_) | | |____| || |_) || __/| || || (_) |\n"
    "\t \\/ \\/ \\___||_| \\___| \\___/ |_| |_| |_| \\___| \\___| \\___/ \\_____/|_||_.__/ \\___||_||_| \\___/ \n");
    printf("\n\n");
    printf("\t\t _____ ______ _ \n"
    "\t\t | __ \\ | ____| | | \n"
    "\t\t | |__) |_ __ ___ ___ ___ | |__ _ __ | |_ ___ _ __ \n"
    "\t\t | ___/| '__| / _ \\/ __|/ __| | __| | '_ \\ | __| / _ \\| '__| \n"
    "\t\t | | | | | __/\\__ \\\\__ \\ | |____ | | | || |_ | __/| | \n"
    "\t\t |_| |_| \\___||___/|___/ |______||_| |_| \\__| \\___||_| \n"
    "\t\t \n"
    "\t\t \n"
    "\t\t _ _____ _ _ \n"
    "\t\t | | / ____| | | (_) \n"
    "\t\t | |_ ___ | | ___ _ __ | |_ _ _ __ _ _ ___ \n"
    "\t\t | __| / _ \\ | | / _ \\ | '_ \\ | __|| || '_ \\ | | | | / _ \\\n"
    "\t\t | |_ | (_) | | |____ | (_) || | | || |_ | || | | || |_| || __/\n"
    "\t\t \\__| \\___/ \\_____| \\___/ |_| |_| \\__||_||_| |_| \\__,_| \\___|");
    // locate today's slot and scan forward to the next non-empty event entry
    todays_event = days_gone(month, date);
    if(todays_event == 366) // leap-day sentinel: restart the scan at index 67
        next_event = 67;
    else next_event = todays_event+1;
    while(special_days[next_event][0] == 0)
        next_event++;
    char ch;
    while((ch = _getch()) != enter); // wait for enter before continuing
    system("cls");
}
// Arrow-key driven login menu. Redraws the six options each iteration,
// highlighting the row tracked by `counter`, and returns the chosen option
// as the character '1'..'6' when enter is pressed.
char login_menu()
{
    print_theme();
    int set_color[] = {selected, not_selected, not_selected, not_selected, not_selected, not_selected};
    int counter = 0, options = 6, x_offset, y_offset;
    char key;
    x_offset = 40;
    y_offset = 5;
    while (1)
    {
        gotoxy(x_offset +4 -5, y_offset +5);
        color(set_color[0]);
        if(set_color[0] == selected)
            printf("->>> Register <<<-");
        else printf(" Register ");
        gotoxy(x_offset +5 -5, y_offset +6);
        color(set_color[1]);
        if(set_color[1] == selected)
            printf("->>> Login <<<-");
        else printf(" Login ");
        gotoxy(x_offset +1 -5, y_offset +7);
        color(set_color[2]);
        if(set_color[2] == selected)
            printf("->>> List all users <<<-");
        else printf(" List all users ");
        gotoxy(x_offset +2 -5, y_offset +8);
        color(set_color[3]);
        if(set_color[3] == selected)
            printf("->>> Change theme <<<-");
        else printf(" Change theme ");
        gotoxy(x_offset +0 -5, y_offset +9);
        color(set_color[4]);
        if(set_color[4] == selected)
            printf("->>> Open readme file <<<-");
        else printf(" Open readme file ");
        gotoxy(x_offset +4 -5, y_offset +10);
        color(set_color[5]);
        if(set_color[5] == selected)
        {
            printf("->>> Exit :( <<<-");
            gotoxy(x_offset +4 -1, y_offset +11);
            printf("NOOOOOOOO");
        }
        else
        {
            printf(" Exit :( ");
            gotoxy(x_offset +4 -1, y_offset +11);
            printf(" ");
        }
        gotoxy(0,0);
        key = _getch();
        cheat_code(key); // feed keystrokes to the easter-egg state machine
        if (key == -32 || key == 0)
            key = _getch(); // arrow keys arrive as a 0/-32 prefix + scan code
        if (key == up_arrow)
            counter = (counter + options -1) % options;
        if (key == down_arrow)
            counter = (counter + 1) % options;
        if (key == enter)
        {
            fflush(stdin);
            system("cls");
            color(selected);
            return counter +1 +'0'; // encode the 1-based choice as a digit char
        }
        // repaint bookkeeping: highlight the new row, un-highlight neighbours
        set_color[counter] = selected;
        set_color[(counter + 1) % options] = not_selected;
        set_color[(counter + options -1) % options] = not_selected;
    }
}
// Arrow-key driven main menu shown after login; same drawing/selection
// scheme as login_menu(). Returns the 1-based choice as a digit character.
char main_menu()//diary-rem-pm, changeinfo-logout-deleteacc
{
    print_theme();
    int set_color[] = {selected, not_selected, not_selected, not_selected, not_selected, not_selected};
    int counter = 0, options = 6, x_offset, y_offset;
    x_offset = 40;
    y_offset = 5;
    char key;
    while (1)
    {
        gotoxy(x_offset +3 -5, y_offset +5);
        color(set_color[0]);
        if(set_color[0] == selected)
            printf("->>> Diary <<<-");
        else printf(" Diary ");
        gotoxy(x_offset -5 -5, y_offset +6);
        color(set_color[1]);
        if(set_color[1] == selected)
            printf("->>> To-do List / Reminders <<<-");
        else printf(" To-do List / Reminders ");
        gotoxy(x_offset -2 -5, y_offset +7);
        color(set_color[2]);
        if(set_color[2] == selected)
            printf("->>> Password Manager <<<-");
        else printf(" Password Manager ");
        gotoxy(x_offset -2 -5, y_offset +8);
        color(set_color[3]);
        if(set_color[3] == selected)
            printf("->>> Change login info <<<-");
        else printf(" Change login info ");
        gotoxy(x_offset +3 -5, y_offset +9);
        color(set_color[4]);
        if(set_color[4] == selected)
            printf("->>> Logout <<<-");
        else printf(" Logout ");
        gotoxy(x_offset -1 -5, y_offset +10);
        color(set_color[5]);
        if(set_color[5] == selected)
            printf("->>> Delete account <<<-");
        else printf(" Delete account ");
        gotoxy(0,0);
        key = _getch();
        cheat_code(key); // feed keystrokes to the easter-egg state machine
        if (key == -32 || key == 0)
            key = _getch(); // arrow keys arrive as a 0/-32 prefix + scan code
        if (key == up_arrow)
            counter = (counter + options -1) % options;
        if (key == down_arrow)
            counter = (counter + 1) % options;
        if (key == enter)
        {
            fflush(stdin);
            system("cls");
            color(selected);
            return counter +1 +'0';
        }
        set_color[counter] = selected;
        set_color[(counter + 1) % options] = not_selected;
        set_color[(counter + options -1) % options] = not_selected;
    }
}
// Arrow-key driven password-manager menu (7 options); same scheme as the
// other menus. Returns the 1-based choice as a digit character.
// NOTE(review): unlike login_menu()/main_menu(), keystrokes here are NOT
// forwarded to cheat_code() — confirm whether that is intentional.
char password_manager_menu()//add-search-remove-listall-strength-gen-exit
{
    print_theme();
    int set_color[] = {selected, not_selected, not_selected, not_selected, not_selected, not_selected, not_selected};
    int counter = 0, options = 7, x_offset, y_offset;
    x_offset = 40;
    y_offset = 5;
    char key;
    while (1)
    {
        gotoxy(x_offset +0 -4, y_offset +5);
        color(set_color[0]);
        if(set_color[0] == selected)
            printf("->>> Add entry <<<-");
        else printf(" Add entry ");
        gotoxy(x_offset +0 -4, y_offset +6);
        color(set_color[1]);
        if(set_color[1] == selected)
            printf("->>> Find entry <<<-");
        else printf(" Find entry ");
        gotoxy(x_offset -1 -4, y_offset +7);
        color(set_color[2]);
        if(set_color[2] == selected)
            printf("->>> Remove entry <<<-");
        else printf(" Remove entry ");
        gotoxy(x_offset -1 -5, y_offset +8);
        color(set_color[3]);
        if(set_color[3] == selected)
            printf("->>> List all entries <<<-");
        else printf(" List all entries ");
        gotoxy(x_offset -5 -5, y_offset +9);
        color(set_color[4]);
        if(set_color[4] == selected)
            printf("->>> Password strength checker <<<-");
        else printf(" Password strength checker ");
        gotoxy(x_offset -2 -5, y_offset +10);
        color(set_color[5]);
        if(set_color[5] == selected)
            printf("->>> Password generator <<<-");
        else printf(" Password generator ");
        gotoxy(x_offset +4 -5, y_offset +11);
        color(set_color[6]);
        if(set_color[6] == selected)
            printf("->>> Exit <<<-");
        else printf(" Exit ");
        gotoxy(0,0);
        key = _getch();
        if (key == -32 || key == 0)
            key = _getch(); // arrow keys arrive as a 0/-32 prefix + scan code
        if (key == up_arrow)
            counter = (counter + options -1) % options;
        if (key == down_arrow)
            counter = (counter + 1) % options;
        if (key == enter)
        {
            fflush(stdin);
            system("cls");
            color(selected);
            return counter +1 +'0';
        }
        set_color[counter] = selected;
        set_color[(counter + 1) % options] = not_selected;
        set_color[(counter + options -1) % options] = not_selected;
    }
}
// Arrow-key driven diary menu (7 options); same scheme as the other menus.
// Returns the 1-based choice as a digit character.
char diary_menu()
{
    print_theme();
    int set_color[] = {selected, not_selected, not_selected, not_selected, not_selected, not_selected, not_selected};
    int counter = 0, options = 7, x_offset, y_offset;
    x_offset = 40;
    y_offset = 5;
    char key;
    while (1)
    {
        gotoxy(x_offset +0 -3, y_offset +5);
        color(set_color[0]);
        if(set_color[0] == selected)
            printf("->>> Add entry <<<-");
        else printf(" Add entry ");
        gotoxy(x_offset +0 -3, y_offset +6);
        color(set_color[1]);
        if(set_color[1] == selected)
            printf("->>> Load entry <<<-");
        else printf(" Load entry ");
        gotoxy(x_offset -1 -3, y_offset +7);
        color(set_color[2]);
        if(set_color[2] == selected)
            printf("->>> Remove entry <<<-");
        else printf(" Remove entry ");
        gotoxy(x_offset -1 -5, y_offset +8);
        color(set_color[3]);
        if(set_color[3] == selected)
            printf("->>> List all entries <<<-");
        else printf(" List all entries ");
        gotoxy(x_offset -5 +2, y_offset +9);
        color(set_color[4]);
        if(set_color[4] == selected)
            printf("->>> Edit list <<<-");
        else printf(" Edit list ");
        gotoxy(x_offset -2 -2, y_offset +10);
        color(set_color[5]);
        if(set_color[5] == selected)
            printf("->>> Search entry <<<-");
        else printf(" Search entry ");
        gotoxy(x_offset +4 -5, y_offset +11);
        color(set_color[6]);
        if(set_color[6] == selected)
            printf("->>> Exit <<<-");
        else printf(" Exit ");
        gotoxy(0,0);
        key = _getch();
        if (key == -32 || key == 0)
            key = _getch(); // arrow keys arrive as a 0/-32 prefix + scan code
        if (key == up_arrow)
            counter = (counter + options -1) % options;
        if (key == down_arrow)
            counter = (counter + 1) % options;
        if (key == enter)
        {
            fflush(stdin);
            system("cls");
            color(selected);
            return counter +1 +'0';
        }
        set_color[counter] = selected;
        set_color[(counter + 1) % options] = not_selected;
        set_color[(counter + options -1) % options] = not_selected;
    }
}
// Arrow-key driven to-do list menu (8 options) with an embedded completion
// progress bar. Unlike the other menus this returns the 1-based choice as
// an int (not a digit character).
int todo_menu()
{
    print_theme();
    int set_color[] = {selected, not_selected, not_selected, not_selected, not_selected, not_selected, not_selected, not_selected};
    int counter = 0, options = 8, x_offset, y_offset;
    x_offset = 40;
    y_offset = 5;
    char key;
    while (1)
    {
        gotoxy(x_offset +0 -3, y_offset +5);
        color(set_color[0]);
        if(set_color[0] == selected)
            printf("->>> Add entry <<<-");
        else printf(" Add entry ");
        gotoxy(x_offset +0 -3, y_offset +6);
        color(set_color[1]);
        if(set_color[1] == selected)
            printf("->>> Edit entry <<<-");
        else printf(" Edit entry ");
        gotoxy(x_offset -1 -3, y_offset +7);
        color(set_color[2]);
        if(set_color[2] == selected)
            printf("->>> Search entry <<<-");
        else printf(" Search entry ");
        gotoxy(x_offset -1 -3, y_offset +8);
        color(set_color[3]);
        if(set_color[3] == selected)
            printf("->>> Remove entry <<<-");
        else printf(" Remove entry ");
        gotoxy(x_offset -5 -0, y_offset +9);
        color(set_color[4]);
        if(set_color[4] == selected)
            printf("->>> List all entries <<<-");
        else printf(" List all entries ");
        gotoxy(x_offset -2 -4, y_offset +10);
        color(set_color[5]);
        if(set_color[5] == selected)
            printf("->>> Mark entry as done <<<-");
        else printf(" Mark entry as done ");
        gotoxy(x_offset +4 -8, y_offset +11);
        color(set_color[6]);
        if(set_color[6] == selected)
            printf("->>> Calendar View <<<-");
        else printf(" Calendar View ");
        gotoxy(x_offset +4 -4, y_offset +12);
        color(set_color[7]);
        if(set_color[7] == selected)
            printf("->>> Exit <<<-");
        else printf(" Exit ");
        color(not_selected);
        Progress_Bar(); // draw the to-do completion bar below the menu
        gotoxy(0,0);
        key = _getch();
        if (key == -32 || key == 0)
            key = _getch(); // arrow keys arrive as a 0/-32 prefix + scan code
        if (key == up_arrow)
            counter = (counter + options -1) % options;
        if (key == down_arrow)
            counter = (counter + 1) % options;
        if (key == enter)
        {
            fflush(stdin);
            system("cls");
            color(selected);
            return counter +1; // plain int here, not a digit character
        }
        set_color[counter] = selected;
        set_color[(counter + 1) % options] = not_selected;
        set_color[(counter + options -1) % options] = not_selected;
    }
}
// Read the persisted dark-mode flag from SETTINGS (defaulting to dark mode
// when the file is absent or malformed) and apply the matching console
// palette to the global colour variables.
// Fixes: fclose() was called unconditionally, i.e. fclose(NULL) when the
// settings file did not exist; and "%d" was scanned directly into a `bool`
// object, which is UB because %d stores a full int.
void load_settings()
{
    FILE* fp = fopen(SETTINGS, "r");
    if(fp)
    {
        int mode = 1; // %d must target an int, not a bool
        if(fscanf(fp, "dark mode = %d", &mode) != 1) mode = 1;
        dark_mode = mode;
        fclose(fp); // only close a stream we actually opened
    }
    else dark_mode = 1; // no settings file yet: default to dark mode
    if(dark_mode)
    {
        system(dark_bg);
        selected = yellow_on_black;
        not_selected = red_on_black;
        theme = green_on_black;
    }
    else
    {
        system(white_bg);
        selected = white_selected;
        not_selected = white_not_selected;
        theme = white_theme;
    }
}
// Persist the current dark-mode flag to SETTINGS.
// Fix: fopen() was unchecked, so fprintf/fclose ran on NULL when the file
// could not be created (e.g. read-only directory).
void save_settings()
{
    FILE* fp = fopen(SETTINGS, "w");
    if(fp == NULL) return; // cannot persist; keep the in-memory setting
    fprintf(fp, "dark mode = %d", dark_mode);
    fclose(fp);
}
void toggle_darkmode()
{
dark_mode = !dark_mode;
if(dark_mode)
{
system(dark_bg);
selected = 6;
not_selected = 4;
theme = 2;
}
else
{
system(white_bg);
selected = white_selected;
not_selected = white_not_selected;
theme = white_theme;
}
save_settings();
}
// Clear the screen, display the bundled read-me text, and wait for a key.
// Fix: the text was passed AS the printf format string — a classic
// format-string bug if the read-me ever contains a '%'; it is now printed
// as data via "%s".
void OpenReadMeFile()
{
    system("cls");
    printf("%s", read_me_text);
    system("pause");
}
// Easter-egg state machine fed one keystroke at a time from the menus.
// Three hidden sequences are tracked with static progress counters:
//   "flashbang" — strobes the console between the two palettes,
//   "christmas" — plays christmas_music(),
//   "sus"       — draws an Among Us crewmate.
// Any non-matching key resets that sequence's counter to zero.
void cheat_code(char ch)
{
    static int flashbang = 0, christmas = 0, sus = 0;
    // advance "flashbang" one letter per matching (case-insensitive) key
    if((ch == 'F' || ch == 'f') && flashbang == 0) flashbang++;
    else if((ch == 'L' || ch == 'l') && flashbang == 1) flashbang++;
    else if((ch == 'A' || ch == 'a') && flashbang == 2) flashbang++;
    else if((ch == 'S' || ch == 's') && flashbang == 3) flashbang++;
    else if((ch == 'H' || ch == 'h') && flashbang == 4) flashbang++;
    else if((ch == 'B' || ch == 'b') && flashbang == 5) flashbang++;
    else if((ch == 'A' || ch == 'a') && flashbang == 6) flashbang++;
    else if((ch == 'N' || ch == 'n') && flashbang == 7) flashbang++;
    else if((ch == 'G' || ch == 'g') && flashbang == 8) flashbang++;
    else flashbang = 0;
    if(flashbang == 9)
    {
        system("cls");
        // alternate backgrounds 10 times, then restore the active theme
        for(int i = 0 ; i < 10 ; i++)
        {
            if(i%2 == 0) system(dark_bg);
            else system(white_bg);
            Sleep(200);
        }
        if(dark_mode) system(dark_bg);
        else system(white_bg);
        flashbang = 0;
        print_theme();
    }
    // advance "christmas"
    if((ch == 'C' || ch == 'c') && christmas == 0) christmas++;
    else if((ch == 'H' || ch == 'h') && christmas == 1) christmas++;
    else if((ch == 'R' || ch == 'r') && christmas == 2) christmas++;
    else if((ch == 'I' || ch == 'i') && christmas == 3) christmas++;
    else if((ch == 'S' || ch == 's') && christmas == 4) christmas++;
    else if((ch == 'T' || ch == 't') && christmas == 5) christmas++;
    else if((ch == 'M' || ch == 'm') && christmas == 6) christmas++;
    else if((ch == 'A' || ch == 'a') && christmas == 7) christmas++;
    else if((ch == 'S' || ch == 's') && christmas == 8) christmas++;
    else christmas = 0;
    if(christmas == 9)
    {
        system("cls");
        christmas_music();
        christmas = 0;
        print_theme();
    }
    // advance "sus"
    if((ch == 'S' || ch == 's') && sus == 0) sus++;
    else if((ch == 'U' || ch == 'u') && sus == 1) sus++;
    else if((ch == 'S' || ch == 's') && sus == 2) sus++;
    else sus = 0;
    if(sus == 3)
    {
        sus = 0;
        system("cls");
        printf(" @@@@@ \n"
        " @@@@ @@@ \n"
        " @@@@@@@@ @@@@@@@@@ \n"
        " @@ @ @@ @@ \n"
        " @@ @@ @ @@ \n"
        " @ @ @@ @@ \n"
        " @@ @ @@@@@@@@ \n"
        " @@@@@@@ @ \n"
        " @ @@@@@ @ \n"
        " @ @ @ @ \n"
        " @ @ @ @ \n"
        " @ @ @ @ \n"
        " @ @ @ @@ \n"
        " @@@ @@@ \n"
        " \n"
        " \n");
        Sleep(1000);
        // system("pause");
        system("cls");
        print_theme();
    }
}
<file_sep>/Libello.c
#include "Main_log.h"
#include "eventlist.h"
#include "Menu.h"
#include "PasswordManager.h"
#include "diary.h"
#include "ToDOList.h"
int main(void)
{
data* UserName_Password;
char user[USERLEN], password[PASSLEN], question[QUESLEN], answer[ANSLEN], hint[HINTLEN];
FILE* fp;
generate_time();
start_up();
CreateReadMeFile();
// test
// diary();
// todo();
// return 0;
while(1)
{
char command;
command = login_menu();
if(command == '6')
break;
fp = fopen(MAINFILENAME, "rb");
if(fp != NULL)
UserName_Password = DeserializeMainFile(fp);
else
{
current_size = 0;
current_capacity = 4;
}
if(command == '1')
{
while(1)
{
printf("Give us the username you would like to use (max length 50):\n");
fgets(user, USERLEN, stdin);
int check = FindUser(UserName_Password, user);
if(check != -1)
printf("Sorry this username is already taken\n");
else break;
}
printf("Now give us your password (Max 50):\n");
scan_password(password);
printf("Confirm your password:\n");
char password_confirmation[PASSLEN];
scan_password(password_confirmation);
if(strcmp(password, password_confirmation))
{
printf("Your confirmation didn't match!\n");
system("pause");
continue;
}
printf("Give a hint for your password (just in case): ");
fgets(hint, HINTLEN, stdin);
printf("In case you forget your password, it's time to set up a backup Q/A\n");
printf("Question: ");
fgets(question, QUESLEN, stdin);
printf("Answer: ");
fgets(answer, ANSLEN, stdin);
UserName_Password = InsertUser(UserName_Password, user, password, question, answer, hint);
system("cls");
printf("<SUCCESSFULLY CREATED ACCOUNT>\n");
system("pause");
normalize_file(MAINFILENAME);
fp = fopen(MAINFILENAME, "wb");
SerializeMainFile(UserName_Password, fp);
read_only_file(MAINFILENAME);
hide_file(MAINFILENAME);
}
else if(command == '2')
{
int check;
printf("Give us your username:\n");
fgets(user, USERLEN, stdin);
fflush(stdin);
check = FindUser(UserName_Password, user);
if(check == -1)
{
printf("No such user found...try again\n");
system("pause");
continue;
}
printf("Now give us your password (give blank enter to show hint):\n");
scan_password(password);
fflush(stdin);
if(strlen(password) == 1 && password[0] == '\n')
{
printf("Hint : %s", UserName_Password[check].hint);
printf("Now give us your password (give blank enter to show Q/A):\n");
scan_password(password);
fflush(stdin);
}
if(strlen(password) == 1 && password[0] == '\n')
{
printf("Question : %s", UserName_Password[check].question);
printf("Now give us your answer:\n");
fgets(answer, PASSLEN, stdin);
fflush(stdin);
}
if((!(strlen(password) == 1 && password[0] == '\n') && !strcmp(UserName_Password[check].password, password))
|| !strcmp(UserName_Password[check].answer, answer))
{
system("cls");
printf("<SUCCESSFULLY LOGGED IN>\n");
system("pause");
// changing directory to the user folder
char folder_name[MAXFILENAMELEN] = {}, extension[MAXFILENAMELEN] = {};
num_to_string(extension, check);
strcpy(folder_name, "User");
strcat(folder_name, extension);
mkdir(folder_name);
normalize_file(folder_name);
// making the directory read-write
chdir(folder_name);
check = main_menu();
while(check == '3' || check == '2' || check == '1')
{
if(check == '1') diary();
if(check == '2') todo();
if(check == '3') PasswordManager();
check = main_menu();
}
// getting out of the directory
chdir("..");
read_only_file(folder_name);
hide_file(folder_name);
if(check == '6')
{
system("cls");
printf("Are you sure you want to delete this account?\n");
normalize_file(folder_name);
strcpy(extension, "del ");
strcat(extension, folder_name);
system(extension);
fflush(stdin);
int d = rmdir(folder_name);
if(!d)
{
RemoveUser(UserName_Password, FindUser(UserName_Password, user));
printf("<SUCCESSFULLY DELETED USER>\n");
normalize_file(MAINFILENAME);
fp = fopen(MAINFILENAME, "wb");
SerializeMainFile(UserName_Password, fp);
read_only_file(MAINFILENAME);
hide_file(MAINFILENAME);
system("pause");
}
else
{
read_only_file(folder_name);
hide_file(folder_name);
printf("Verification failed, ABORTING\n");
system("pause");
}
}
if(check == '4')
{
char temp_user[USERLEN] = {};
int user_id = FindUser(UserName_Password, user);
while(1)
{
printf("Give us the username you would like to use (max length 50):\n");
fgets(temp_user, USERLEN, stdin);
fflush(stdin);
check = FindUser(UserName_Password, temp_user);
if(!strcmp(temp_user, user))
break;
if(check != -1)
printf("Sorry this username is already taken\n");
else break;
}
printf("Enter your password: \n");
scan_password(password);
fflush(stdin);
printf("Confirm your password:\n");
char password_confirmation[PASSLEN];
scan_password(password_confirmation);
if(strcmp(password, password_confirmation))
{
printf("Your confirmation didn't match!\n");
system("pause");
continue;
}
printf("Enter your password hint: \n");
fgets(hint, HINTLEN, stdin);
fflush(stdin);
printf("Enter your backup question: \n");
fgets(question, QUESLEN, stdin);
fflush(stdin);
printf("Enter the answer to your question: \n");
fgets(answer, ANSLEN, stdin);
strcpy(UserName_Password[user_id].user, temp_user);
strcpy(UserName_Password[user_id].password, <PASSWORD>);
strcpy(UserName_Password[user_id].hint, hint);
strcpy(UserName_Password[user_id].question, question);
strcpy(UserName_Password[user_id].answer, answer);
normalize_file(MAINFILENAME);
fp = fopen(MAINFILENAME, "wb");
SerializeMainFile(UserName_Password, fp);
read_only_file(MAINFILENAME);
hide_file(MAINFILENAME);
printf("<SUCCESSFULLY CHANGED INFO>\n");
system("pause");
}
}
else if(strlen(password) == 1 && password[0] == '\n')
{
system("cls");
printf("Sorry, wrong answer\n");
system("pause");
}
else
{
system("cls");
printf("Sorry, wrong password\n");
system("pause");
}
}
else if(command == '3')
{
if(current_size == 0)
printf("There are no users currently\n");
else
{
printf("All the current users are:\n");
ListAllUsers(UserName_Password);
}
system("pause");
}
else if(command == '4')
toggle_darkmode();
else if(command == '5')
OpenReadMeFile();
}
return 0;
} | 83b769309d6139f61d877477f3ce1f12f08dcbdd | [
"C"
] | 7 | C | Inzamam20/Inzamam20 | 6865f9d68698b9c0d03ec70352fe0038a5e23fea | 1d7e05ae0d52eda06099dbc0eed3550f751e4050 |
refs/heads/main | <repo_name>schae42/appymeal<file_sep>/server/config/mongoose.config.js
// Establishes the application's MongoDB connection via Mongoose.
// The target database name is read from the DB_NAME environment variable.
const mongoose = require('mongoose');
mongoose.connect(`mongodb://localhost/${process.env.DB_NAME}`,{
    // Opt in to the new URL parser / topology engine (silences
    // deprecation warnings on Mongoose 5.x).
    useNewUrlParser: true,
    useUnifiedTopology: true
})
    // Log success or failure; the app keeps running either way.
    .then((res) => console.log(`DB connection established ${process.env.DB_NAME}`))
.catch((err) => console.log(`Something went wrong: ${err}`));<file_sep>/client/src/views/Main.jsx
import {useState,useEffect} from "react";
import Axios from 'axios';
import { navigate } from "@reach/router";
const Main = props => {
const [users,setUsers] = useState([]);
const [loggedIn,setLoggedIn] = useState( JSON.parse(localStorage.getItem("user")) || {firstName:"John",
lastName:"Robert"})
useEffect(() => {
Axios.get("http://localhost:8000/api/users",{withCredentials:true})
.then(res => setUsers(res.data.results))
.catch(err => {
if(err.response.status === 401){
navigate('/');
}
})
})
const logout = () => {
Axios.get('http://localhost:8000/api/logout',{withCredentials:true})
.then(res => {
localStorage.clear();
navigate('/')})
.catch(err => console.log(err));
}
return(
<div>
<h1 className="text-center">Welcome {loggedIn.firstName}</h1>
<button onClick={logout} className="btn btn-warning">Logout</button>
</div>
);
}
export default Main; | 9007e6438a3149611c2740d09d2fdaafa503a195 | [
"JavaScript"
] | 2 | JavaScript | schae42/appymeal | 625d76d86978b17da51c323844a17ff399fa23d6 | 5928f6a90f059bb6314880e3ee0cfe18950df6c4 |
refs/heads/main | <repo_name>GaetanOff/pendu<file_sep>/main.py
import turtle
import random

# Hangman ("pendu") rendered with turtle: the player guesses letters of a
# hidden French word. All candidate words are exactly 10 letters long,
# which the masking below relies on (first letter + 8 blanks + last letter).
screen = turtle.getscreen()
screen.setup(width=500, height=350)
words = ["apercevoir",
"ascensseur",
"aspirateur",
"bijouterie",
"brouillard",
"capitaines",
"celebrites",
"definition",
"economiser",
"entraineur"]
words = [i.upper() for i in words]  # play entirely in upper case
word = random.choice(words)
run = True
score = 15  # remaining wrong guesses before the game is lost
turtle.up()
# Mask: first letter, 8 underscores, last letter — space separated so that
# hide.split() yields one token per letter position.
hide = word[0] + " " + " ".join("_" for _ in range(8)) + " " + word[-1]
turtle.setx(-160)
turtle.write(hide, font=("arial", 30, 'normal'))
turtle.setpos(-200, -175)
turtle.write(f"score : {score}", font=("arial", 30, 'normal'))
letter = turtle.textinput("Choississez une lettre", "Veuillez ne mettre qu'une seule lettre")
while run:
    # textinput returns None when the user cancels the dialog.
    if letter is None:
        break
    # Wrong guess: lose one point. (Substring check, so a multi-character
    # input that happens to appear in the word is not penalised.)
    if letter.upper() not in word:
        score -= 1
    # Reveal every position matching the guessed letter.
    tab = hide.split()
    for i in range(len(word)):
        if word[i] == letter.upper():
            tab[i] = letter.upper()
    hide = " ".join(tab)
    # Redraw the mask and the score.
    turtle.clear()
    turtle.setpos(-160, 0)
    turtle.write(hide, font=("arial", 30, 'normal'))
    turtle.setpos(-200, -175)
    turtle.write(f"score : {score}", font=("arial", 30, 'normal'))
    # Win: every blank has been filled in.
    if "".join(hide.split()) == word:
        turtle.setpos(-160, 100)
        turtle.write("Bravo, vous avez gagné", font=("arial", 15, 'normal'))
        break
    # Lose: no wrong guesses left.
    if score == 0:
        turtle.setpos(-160, 100)
        turtle.write(f'Dommage, le mot était "{word}"', font=("arial", 15, 'normal'))
        break
    # Prompt again, with a different dialog title after a wrong guess.
    if letter.upper() in word:
        letter = turtle.textinput("Choississez une lettre", "Veuillez ne mettre qu'une seule lettre")
    else:
        letter = turtle.textinput("Mauvaise lettre !", "Veuillez ne mettre qu'une seule lettre")
    # NOTE(review): this guard is the last statement of the loop body, so
    # `continue` changes nothing — multi-character input is still processed
    # on the next iteration. Looks like leftover/ineffective validation.
    if len(letter) != 1:
        continue
turtle.mainloop()
| e1d92345dc4eee7e2edb84e3a925451f199510dd | [
"Python"
] | 1 | Python | GaetanOff/pendu | 961d63244e132b9a1b0712e15fb6be1e7af00680 | 25e6ca11a35b3f9b6be60a66cb1e86cdcb0774f1 |
refs/heads/master | <repo_name>mboiteau/PiSenseBoard<file_sep>/Client.py
import paho.mqtt.client as mqtt
import paho.mqtt.publish as publish
from Utility import Utility
class Client:
    """Thin wrapper around a paho-mqtt client.

    Subscribes to the Raspberry Pi's LED topics and forwards every LED
    command to Utility.led_func for execution on the Sense HAT.
    """

    def start(self):
        """Block forever, processing MQTT network traffic."""
        self.mqttc.loop_forever()

    def connect(self):
        """Connect to the local broker (port 1883, 60 s keepalive)."""
        self.mqttc.connect("localhost", 1883, 60)

    def on_connect(self, mqttc, obj, flags, rc):
        # Subscribing here (rather than once at startup) means the
        # subscription is re-established on every reconnect.
        print("Connected with result code "+str(rc))
        mqttc.subscribe("/raspberry/led/#")

    def on_message(self, client, userdata, msg):
        # Dispatch every LED-related message to the Utility handler.
        # NOTE(review): msg.payload is bytes under Python 3's paho client;
        # led_func compares it against str literals — confirm the target
        # Python version or decode here.
        print(msg.topic+" "+str(msg.payload))
        if "/raspberry/led/" in msg.topic:
            Utility.led_func(msg.topic, msg.payload)

    def on_publish(self, mqttc, obj, mid):
        print("mid: " + str(mid))

    def on_subscribe(self, mqttc, obj, mid, granted_qos):
        print("Subscribed: " + str(mid) + " " + str(granted_qos))

    def __init__(self):
        # Wire the paho client's callbacks to the methods above.
        self.mqttc = mqtt.Client()
        self.mqttc.on_message = self.on_message
        self.mqttc.on_connect = self.on_connect
        self.mqttc.on_publish = self.on_publish
self.mqttc.on_subscribe = self.on_subscribe
<file_sep>/README.md
# PiSenseBoard
A project intended for the Sense HAT add-on board of the Raspberry Pi.
The aim of this project is to collect the data from the sensors of the Sense HAT module and send it to a mobile app via the MQTT protocol.
It also allows controlling the board's behavior, such as the brightness of the LED matrix; you can also choose among multiple programs to drive the LED matrix.
<file_sep>/main.py
#!/usr/bin/python
# -*- coding: utf-8 -*-
import threading
import time
from Client import Client
from Utility import Utility
def thread_pool():
    """Periodically sample every sensor, each reading on its own thread.

    Each cycle spawns one short-lived thread per sensor routine, staggered
    by 50 ms so the reads/publishes do not pile up, then sleeps for
    ``refresh_rate`` seconds before the next round. Runs forever; the
    caller starts it on a daemon thread so it dies with the process.
    """
    refresh_rate = 5  # seconds between measurement rounds
    # One entry per sensor routine — replaces five copy-pasted stanzas.
    sensor_tasks = (
        Utility.cpu_temp,
        Utility.cpu_usage,
        Utility.ambiant_temp,
        Utility.ambiant_pressure,
        Utility.ambiant_humidity,
    )
    while True:
        for task in sensor_tasks:
            threading.Thread(target=task).start()
            time.sleep(0.05)
        time.sleep(refresh_rate)
        print("\n")
def main():
    # Wire up the MQTT client, start the sensor-polling loop in the
    # background, then hand the foreground over to the MQTT network loop.
    client = Client()
    client.connect()
    main_thread=threading.Thread(target=thread_pool)
    main_thread.daemon = True  # allow the process to exit while polling
    main_thread.start()
    client.start()  # blocking: paho loop_forever()

if __name__ == "__main__":
    main()
<file_sep>/Utility.py
# -*- coding: utf-8 -*-
from sense_hat import SenseHat
import psutil
import time
import threading
import paho.mqtt.publish as publish
from gpiozero import CPUTemperature
# Singleton Sense HAT driver shared by every helper below.
sense = SenseHat()

class Utility:
    """Namespace of static helpers: LED-matrix animation modes, MQTT-driven
    LED control, and sensor sampling/publishing for the Sense HAT."""

    # Last sampled sensor values, published to MQTT on each refresh.
    temp = 0
    pressure = 0
    humidity = 0
    cputemp = 0
    usage = 0
    # Snapshot of the LED matrix taken when the display is switched off,
    # restored when it is switched back on.
    pixels = None
    # True while an animated LED mode is running.
    mode_activated = False
    # True while the matrix shows a single static colour.
    static_clr_activated = False
    # Worker thread of the current animation; stopped by setting its
    # `do_run` attribute to False and joining.
    current_thread = None

    @staticmethod
    def rgb_parsing(rgb_result):
        """Parse a textual "r, g, b" payload into a list of three ints.

        Non-digit characters are stripped from each comma-separated
        component, so inputs like "(255, 0, 10)" are tolerated.
        """
        parsed = (rgb_result.replace(" ", "")).split(',')
        rgb_array = []
        for i in parsed:
            s = ''.join([c for c in i if c.isdigit()])
            rgb_array.append(int(s))
        return rgb_array

    @staticmethod
    def led_mode_selector(mode):
        """Start the animation thread matching `mode` ("0".."3")."""
        if mode == "0":
            Utility.current_thread=threading.Thread(target=Utility.rainbow_mode)
            Utility.current_thread.start()
        if mode == "1":
            Utility.current_thread=threading.Thread(target=Utility.strombo_mode)
            Utility.current_thread.start()
        if mode == "2":
            Utility.current_thread=threading.Thread(target=Utility.color_mode)
            Utility.current_thread.start()
        if mode == "3":
            Utility.current_thread=threading.Thread(target=Utility.display_text_mode)
            Utility.current_thread.start()

    @staticmethod
    def next_colour(pix):
        """Advance `pix` ([r, g, b], mutated in place) one step around the
        colour wheel (red -> yellow -> green -> cyan -> blue -> magenta)."""
        r = pix[0]
        g = pix[1]
        b = pix[2]
        if (r == 255 and g < 255 and b == 0):
            g += 1
        if (g == 255 and r > 0 and b == 0):
            r -= 1
        if (g == 255 and b < 255 and r == 0):
            b += 1
        if (b == 255 and g > 0 and r == 0):
            g -= 1
        if (b == 255 and r < 255 and g == 0):
            r += 1
        if (r == 255 and b > 0 and g == 0):
            b -= 1
        pix[0] = r
        pix[1] = g
        pix[2] = b

    @staticmethod
    def display_text_mode():
        """Scroll the CPU temperature/usage across the matrix until the
        thread's `do_run` attribute is set to False."""
        t = threading.currentThread()
        while getattr(t, "do_run", True):
            s = "Temp CPU: "+str(round(Utility.temp,1))+" CPU usage: "+str(Utility.usage)
            sense.show_message(s, text_colour=[255, 0, 0])

    @staticmethod
    def strombo_mode():
        """Strobe the whole matrix (near-white on/off every 50 ms)."""
        pixels_strombo = []
        for i in range(0, 64):
            pixels_strombo.append([252, 248, 252])
        print(pixels_strombo)
        sense.set_pixels(pixels_strombo)
        msleep = lambda x: time.sleep(x / 1000.0)
        t = threading.currentThread()
        while getattr(t, "do_run", True):
            sense.set_pixels(pixels_strombo)
            msleep(50)
            sense.clear()
            msleep(50)

    @staticmethod
    def color_mode():
        """Cycle the whole matrix through solid colours every 500 ms."""
        msleep = lambda x: time.sleep(x / 1000.0)
        t = threading.currentThread()
        r, g, b, i = 0, 0, 0, 0
        while getattr(t, "do_run", True):
            if i == 0:
                r = 255
                g = 0
                b = 0
            if i == 1:
                r = 0
                g = 255
                b = 0
            # NOTE(review): there is no branch for i == 2, so green is shown
            # for two consecutive ticks; `if i == 2` for blue was probably
            # intended instead of i == 3 — confirm before changing.
            if i == 3:
                r = 0
                g = 0
                b = 255
                i = 0
            else:
                i = i + 1
            sense.clear(r,g,b)
            msleep(500)

    @staticmethod
    def rainbow_mode():
        """Animate a scrolling rainbow: step every pixel of a precomputed
        gradient around the colour wheel every 2 ms."""
        pixels_rainbow = [
        [255, 0, 0], [255, 0, 0], [255, 87, 0], [255, 196, 0], [205, 255, 0], [95, 255, 0], [0, 255, 13], [0, 255, 122],
        [255, 0, 0], [255, 96, 0], [255, 205, 0], [196, 255, 0], [87, 255, 0], [0, 255, 22], [0, 255, 131], [0, 255, 240],
        [255, 105, 0], [255, 214, 0], [187, 255, 0], [78, 255, 0], [0, 255, 30], [0, 255, 140], [0, 255, 248], [0, 152, 255],
        [255, 223, 0], [178, 255, 0], [70, 255, 0], [0, 255, 40], [0, 255, 148], [0, 253, 255], [0, 144, 255], [0, 34, 255],
        [170, 255, 0], [61, 255, 0], [0, 255, 48], [0, 255, 157], [0, 243, 255], [0, 134, 255], [0, 26, 255], [83, 0, 255],
        [52, 255, 0], [0, 255, 57], [0, 255, 166], [0, 235, 255], [0, 126, 255], [0, 17, 255], [92, 0, 255], [201, 0, 255],
        [0, 255, 66], [0, 255, 174], [0, 226, 255], [0, 117, 255], [0, 8, 255], [100, 0, 255], [210, 0, 255], [255, 0, 192],
        [0, 255, 183], [0, 217, 255], [0, 109, 255], [0, 0, 255], [110, 0, 255], [218, 0, 255], [255, 0, 183], [255, 0, 74]
        ]
        msleep = lambda x: time.sleep(x / 1000.0)
        t = threading.currentThread()
        while getattr(t, "do_run", True):
            for pix in pixels_rainbow:
                Utility.next_colour(pix)
            sense.set_pixels(pixels_rainbow)
            msleep(2)
        sense.clear()

    @staticmethod
    def led_func(topic, data):
        """Dispatch one MQTT LED command (topic + payload) to the matrix.

        NOTE(review): `data` is compared against str literals; paho's
        msg.payload is bytes on Python 3 — confirm the payload is decoded
        (or that this targets Python 2) before relying on these branches.
        """
        # Brightness: "1" = full brightness, "0" = low-light mode.
        if topic == "/raspberry/led/luminosity":
            if data == "1":
                print("low light off")
                sense.low_light = False
            if data == "0":
                print("low light on")
                sense.low_light = True
        # Display on/off: off saves the current pixels, on restores them.
        if topic == "/raspberry/led/state":
            if data == "True":
                print("led state on")
                Utility.static_clr_activated = True
                Utility.mode_activated = False
                sense.set_pixels(Utility.pixels)
            if data == "False":
                print("led state off\n")
                Utility.static_clr_activated = False
                Utility.pixels = sense.get_pixels()
                print(Utility.pixels)
                sense.clear()
        # Static colour fill, only while static-colour mode is active.
        if topic == "/raspberry/led/color" and Utility.static_clr_activated == True:
            print(data)
            rgb_array = Utility.rgb_parsing(data)
            sense.clear(rgb_array[0], rgb_array[1], rgb_array[2])
        # Animation on/off; turning off stops the thread and restores pixels.
        if topic == "/raspberry/led/mode/activate":
            if data == "True":
                Utility.mode_activated = True
                Utility.static_clr_activated = False
                Utility.led_mode_selector("0")
            if data == "False":
                Utility.current_thread.do_run = False
                Utility.current_thread.join()
                sense.set_pixels(Utility.pixels)
                Utility.mode_activated = False
        # Switch animation: stop the running thread before starting the new one.
        if topic == "/raspberry/led/mode" and Utility.mode_activated == True:
            if Utility.current_thread is not None:
                Utility.current_thread.do_run = False
                Utility.current_thread.join()
            Utility.led_mode_selector(data)

    @staticmethod
    def ambiant_temp():
        """Sample ambient temperature and publish it to MQTT."""
        Utility.temp = sense.get_temperature()
        print("Ambiant temperature: %s °C" % Utility.temp)
        publish.single("/raspberry/ambiant_sensors/temp", Utility.temp, hostname="localhost")

    @staticmethod
    def ambiant_pressure():
        """Sample ambient pressure (millibars) and publish it to MQTT."""
        Utility.pressure = sense.get_pressure()
        print("Ambiant pressure: %s Millibars" % Utility.pressure)
        publish.single("/raspberry/ambiant_sensors/pressure", Utility.pressure, hostname="localhost")

    @staticmethod
    def ambiant_humidity():
        """Sample relative humidity (%rH) and publish it to MQTT."""
        Utility.humidity = sense.get_humidity()
        print("Humidity: %s %%rH" % Utility.humidity)
        publish.single("/raspberry/ambiant_sensors/humidity", Utility.humidity, hostname="localhost")

    @staticmethod
    def cpu_temp():
        """Sample the Pi's CPU temperature and publish it to MQTT."""
        Utility.cputemp = CPUTemperature().temperature
        print("CPU temperature: %f °C" % Utility.cputemp)
        publish.single("/raspberry/cpu/temp", Utility.cputemp, hostname="localhost")

    @staticmethod
    def cpu_usage():
        """Sample instantaneous CPU usage (%) and publish it to MQTT."""
        Utility.usage = psutil.cpu_percent(interval=0, percpu=False)
        print("CPU usage: %f %%" % Utility.usage)
publish.single("/raspberry/cpu/usage", Utility.usage, hostname="localhost")
| d9c5043724b4812bef2af4c5ba82c0e6288454d4 | [
"Markdown",
"Python"
] | 4 | Python | mboiteau/PiSenseBoard | 8f4fe0b2de3be7a22aeee0d2ee51879a10efe437 | 7088b745721ea7a4c7653f3fc8e635a567db7040 |
refs/heads/master | <repo_name>DidkowSky/OneTapGolf<file_sep>/Assets/Scripts/DotPool.cs
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// Simple object pool for the trajectory-preview dots: pre-instantiates a
/// fixed number of prefab instances and hands out inactive ones on demand.
/// </summary>
public class DotPool : MonoBehaviour
{
    /// <summary>Prefab instantiated for every pooled dot.</summary>
    public GameObject DotPrefab;
    /// <summary>Number of dots pre-instantiated at startup.</summary>
    public int amountToPool;

    // Every instance this pool owns, active or not.
    private List<GameObject> pooledObjects = new List<GameObject>();

    private void Start()
    {
        // Build the whole pool up front, parented to this object and
        // disabled until requested.
        for (int created = 0; created < amountToPool; created++)
        {
            GameObject dot = Instantiate(DotPrefab, transform);
            dot.SetActive(false);
            pooledObjects.Add(dot);
        }
    }

    /// <summary>
    /// Returns the first currently-inactive instance, or null when the
    /// pool is exhausted.
    /// </summary>
    public GameObject GetPooledObject()
    {
        for (int index = 0; index < pooledObjects.Count; index++)
        {
            if (!pooledObjects[index].activeInHierarchy)
            {
                return pooledObjects[index];
            }
        }
        return null;
    }

    /// <summary>
    /// Re-parents every active instance back under the pool and disables it.
    /// </summary>
    public void ReturnAllObjectsToPool()
    {
        foreach (GameObject dot in pooledObjects)
        {
            if (!dot.activeInHierarchy)
            {
                continue;
            }
            dot.transform.parent = transform;
            dot.SetActive(false);
        }
    }
}
<file_sep>/Assets/Scripts/TopScore.cs
using System;
using System.Globalization;
using UnityEngine;
using UnityEditor;
using Tools;
/// <summary>
/// ScriptableObject asset holding the persistent best score and the
/// timestamp at which it was achieved.
/// NOTE(review): EditorUtility (UnityEditor) is editor-only — this class
/// will not compile in a standalone player build; confirm that is intended.
/// </summary>
[CreateAssetMenu(fileName = "TopScore", menuName = "ScoreSystem/TopScore")]
public class TopScore : ScriptableObject
{
    #region private variables
    // Serialized so the best score survives editor sessions.
    [SerializeField]
    private Result topScoreResult = new Result();
    #endregion

    #region public methods
    /// <summary>Records `score` (with the current time) only if it beats the stored best.</summary>
    public void SetTopScore(int score)
    {
        if (score > topScoreResult.Score)
        {
            topScoreResult.Score = score;
            // Timestamp formatted with the Polish culture.
            topScoreResult.DateTime = DateTime.Now.ToString(new CultureInfo("pl-PL"));
            // Mark the asset dirty so Unity writes it to disk.
            EditorUtility.SetDirty(this);
        }
    }

    /// <summary>Returns the stored best score (0 when none).</summary>
    public int GetScore()
    {
        return topScoreResult.Score;
    }

    /// <summary>Returns the timestamp string of the best score (empty when none).</summary>
    public string GetDateTime()
    {
        return topScoreResult.DateTime;
    }

    /// <summary>Clears the stored best score and timestamp.</summary>
    public void ResetTopScore()
    {
        topScoreResult.Score = 0;
        topScoreResult.DateTime = string.Empty;
        EditorUtility.SetDirty(this);
    }
    #endregion
}
<file_sep>/Assets/Scripts/BallScript.cs
using System;
using UnityEngine;
/// <summary>
/// Controls the golf ball: charging a shot while the player holds the
/// button, flying along a sine-modulated path, and reporting what it hit.
/// Requires a DotPool on the same GameObject for the trajectory preview.
/// </summary>
[RequireComponent(typeof(DotPool))]
public class BallScript : MonoBehaviour
{
    #region events
    // Raised on any trigger contact; the bool is true when the flagpole
    // (the hole) was hit, false for anything else.
    public event EventHandler<bool> Collision;
    #endregion

    #region private variables
    private DotPool dotPool;
    // False while the game-over screen is up; blocks charging/kicking.
    private bool isInteractable = true;
    private Vector3 startingPosition;
    // Sine frequency of the flight path; charging lerps it max -> min.
    private float frequency = 1.8f;
    // Sine amplitude of the flight path; charging lerps it min -> max.
    private float magnitude = 0.5f;
    // Charge-rate multiplier; increased after every hole (difficulty).
    private float directionVectorMovementSpeed = 1f;
    private bool isMoving = false;
    // Doubles as charge timer (while aiming) and flight timer (in flight).
    private float time = 0.0f;

    // Horizontal flight speed and the charge lerp bounds.
    private const float speed = 3.0f;
    private const float minFrequency = 0.6f;
    private const float maxFrequency = 1.8f;
    private const float minMagnitude = 0.5f;
    private const float maxMagnitude = 5.0f;
    private const string flagpoleLayerName = "Flagpole";
    #endregion

    #region Unity methods
    void Start()
    {
        dotPool = GetComponent<DotPool>();
        startingPosition = transform.localPosition;
    }

    void Update()
    {
        // In flight: advance along x at constant speed while oscillating
        // vertically with the charged frequency/magnitude.
        if (isMoving)
        {
            time += Time.deltaTime;
            transform.position = startingPosition + (transform.right * time * speed) + (transform.up * Mathf.Sin(time * frequency) * magnitude);
        }
    }

    private void OnTriggerEnter2D(Collider2D collision)
    {
        // Any contact ends the flight; report whether it was the hole.
        ResetPosition();
        OnCollision(collision.gameObject.layer == LayerMask.NameToLayer(flagpoleLayerName));
    }
    #endregion

    #region public methods
    /// <summary>Enables/disables player control of the ball.</summary>
    public void SetInteractable(bool interactable)
    {
        isInteractable = interactable;
    }

    /// <summary>
    /// Launches the ball with the currently-charged parameters. Ignored
    /// until charging has actually moved frequency/magnitude off their
    /// resting values (i.e. a zero-charge tap does nothing).
    /// </summary>
    public void Kick()
    {
        if (isInteractable && !isMoving && frequency < maxFrequency && magnitude > minMagnitude)
        {
            dotPool.ReturnAllObjectsToPool();
            time = 0.0f;
            isMoving = true;
        }
    }

    /// <summary>
    /// Called every frame while the button is held: charges the shot by
    /// lerping frequency down and magnitude up, redraws the preview, and
    /// auto-kicks once the charge is maxed out.
    /// </summary>
    public void IncrementKickingStrength()
    {
        if (isInteractable && !isMoving)
        {
            time += Time.deltaTime;
            dotPool.ReturnAllObjectsToPool();
            frequency = Mathf.Lerp(maxFrequency, minFrequency, time * directionVectorMovementSpeed);
            magnitude = Mathf.Lerp(minMagnitude, maxMagnitude, time * directionVectorMovementSpeed);
            DrawBallPath();
            if (frequency <= minFrequency && magnitude >= maxMagnitude)
            {
                Kick();
            }
        }
    }

    /// <summary>Resets charge speed and flight parameters (new game).</summary>
    public void ResetSettings()
    {
        directionVectorMovementSpeed = 1.0f;
        frequency = maxFrequency;
        magnitude = minMagnitude;
    }

    /// <summary>Speeds up charging; called after each successful hole.</summary>
    public void IncrementDirectionVectorMovementSpeed()
    {
        directionVectorMovementSpeed += 0.1f;
    }
    #endregion

    #region private methods
    private void ResetPosition()
    {
        time = 0.0f;
        isMoving = false;
        transform.localPosition = startingPosition;
    }

    // Previews the flight by sampling the path formula every 0.2 time
    // units and placing pooled dots; stops at ground level (y < 0) or
    // when the pool runs out.
    private void DrawBallPath()
    {
        for (float i = 0.0f; i < 10f; i += 0.2f)
        {
            var pooledObject = dotPool.GetPooledObject();
            if (pooledObject != null)
            {
                var position = (transform.right * i * speed) + (transform.up * Mathf.Sin(i * frequency) * magnitude);
                if (position.y >= 0)
                {
                    pooledObject.transform.parent = transform;
                    pooledObject.transform.localPosition = position;
                    pooledObject.SetActive(true);
                }
                else
                {
                    break;
                }
            }
            else
            {
                break;
            }
        }
    }

    // Null-safe raise of the Collision event.
    private void OnCollision(bool isHoleCollision)
    {
        Collision?.Invoke(this, isHoleCollision);
    }
    #endregion
}
<file_sep>/Assets/Scripts/GameManagerScript.cs
using UnityEngine;
using UnityEngine.UI;
using Tools;
/// <summary>
/// Central game controller: wires up the ball and UI, places the flagpole
/// for each hole, tracks the score, and runs the game-over / restart flow.
/// </summary>
public class GameManagerScript : MonoBehaviour
{
    #region public variables
    public Canvas GameOverCanvas;
    public Text YourScoreGUI;
    public Text TopScoreGUI;
    public Text ScoreTextGUI;
    [Space]
    public TopScore TopScoreObject;
    [Space]
    public BallScript Ball;
    [Space]
    public Transform DynamicObjectsParent;
    public SpriteRenderer TerrainGroundUp;
    public GameObject FlagPole;
    #endregion

    #region private variables
    private int score;
    private Collider2D flagpoleCollider;
    #endregion

    #region Unity methods
    private void Awake()
    {
        // Keep the manager alive across scene loads.
        DontDestroyOnLoad(this.gameObject);
    }

    private void Start()
    {
        // Warn (but keep running) for every unassigned inspector reference.
        GameOverCanvas.WarnIfReferenceIsNull(gameObject);
        YourScoreGUI.WarnIfReferenceIsNull(gameObject);
        TopScoreGUI.WarnIfReferenceIsNull(gameObject);
        ScoreTextGUI.WarnIfReferenceIsNull(gameObject);
        TopScoreObject.WarnIfReferenceIsNull(gameObject);
        Ball.WarnIfReferenceIsNull(gameObject);
        DynamicObjectsParent.WarnIfReferenceIsNull(gameObject);
        TerrainGroundUp.WarnIfReferenceIsNull(gameObject);
        FlagPole.WarnIfReferenceIsNull(gameObject);
        if (Ball != null)
        {
            Ball.Collision += Ball_Collision;
        }
        // Replace the prefab reference with a live scene instance.
        if (FlagPole != null)
        {
            FlagPole = Instantiate(FlagPole, DynamicObjectsParent);
        }
        if (FlagPole != null)
        {
            flagpoleCollider = FlagPole.GetComponentInChildren<Collider2D>();
        }
        GenerateLevel();
    }

    private void OnDestroy()
    {
        // Unsubscribe to avoid a dangling handler on the ball.
        if (Ball != null)
        {
            Ball.Collision -= Ball_Collision;
        }
    }

    private void Update()
    {
        // Hold to charge, release to kick.
        if (Input.GetMouseButton(0))
        {
            Ball.IncrementKickingStrength();
        }
        else if (Input.GetMouseButtonUp(0))
        {
            Ball.Kick();
        }
    }
    #endregion

    #region public methods
    /// <summary>Starts a fresh game after game over (hooked to the UI button).</summary>
    public void Restart()
    {
        GenerateLevel();
        ResetScore();
        Ball.SetInteractable(true);
        SwitchGameOverCanvas(false);
    }
    #endregion

    #region private methods
    // Places the flagpole at a random x on the terrain, keeping it fully
    // inside the terrain's right half (from its center to its right edge,
    // minus the pole's own half-width).
    // NOTE(review): assumes flagpoleCollider was resolved in Start();
    // confirm the prefab always carries a child Collider2D.
    private void GenerateLevel()
    {
        if (DynamicObjectsParent != null && FlagPole != null && TerrainGroundUp != null)
        {
            var xMin = TerrainGroundUp.bounds.center.x;
            var xMax = TerrainGroundUp.bounds.center.x + TerrainGroundUp.bounds.extents.x - flagpoleCollider.bounds.extents.x;
            FlagPole.transform.position = new Vector2(Random.Range(xMin, xMax), TerrainGroundUp.bounds.center.y);
        }
    }

    // Freezes the ball, persists/shows the scores, and raises the
    // game-over UI.
    private void GameOver()
    {
        Ball.ResetSettings();
        Ball.SetInteractable(false);
        if (TopScoreObject != null)
        {
            TopScoreObject.SetTopScore(score);
            TopScoreGUI.SetText(TopScoreObject.GetScore());
        }
        YourScoreGUI.SetText(score);
        SwitchGameOverCanvas(true);
        ResetScore();
    }

    private void IncrementScore()
    {
        score++;
        ScoreTextGUI.SetText(score);
    }

    private void ResetScore()
    {
        score = 0;
        ScoreTextGUI.SetText(score);
    }

    private void SwitchGameOverCanvas(bool active)
    {
        if (GameOverCanvas != null)
        {
            GameOverCanvas.gameObject.SetActive(active);
        }
    }

    // Hole hit: next level, harder charge, +1 point. Anything else: game over.
    private void Ball_Collision(object sender, bool isHoleCollision)
    {
        if (isHoleCollision)
        {
            GenerateLevel();
            Ball.IncrementDirectionVectorMovementSpeed();
            IncrementScore();
        }
        else
        {
            GameOver();
        }
    }
    #endregion
}
<file_sep>/Assets/Scripts/Tools/ExtensionMethods.cs
using UnityEngine;
using UnityEngine.UI;
namespace Tools
{
    /// <summary>
    /// Small extension helpers shared across the project: null-reference
    /// warnings for inspector fields and null-safe Text updates.
    /// </summary>
    public static class ExtensionMethods
    {
        /// <summary>Logs a warning (attributed to <paramref name="objectReference"/>) when a GameObject reference is missing.</summary>
        public static void WarnIfReferenceIsNull(this GameObject component, GameObject objectReference)
        {
            if (component != null)
            {
                return;
            }
            Debug.LogWarning("Missing GameObject reference!", objectReference);
        }

        /// <summary>Logs a warning when a Component reference is missing.</summary>
        public static void WarnIfReferenceIsNull(this Component component, GameObject objectReference)
        {
            if (component != null)
            {
                return;
            }
            Debug.LogWarning($"Missing Component reference!", objectReference);
        }

        /// <summary>Logs a warning when a UnityEngine.Object reference is missing.</summary>
        public static void WarnIfReferenceIsNull(this Object component, GameObject objectReference)
        {
            if (component != null)
            {
                return;
            }
            Debug.LogWarning("Missing Object reference!", objectReference);
        }

        /// <summary>Sets the label's text; silently does nothing when the Text is null.</summary>
        public static void SetText(this Text textComponent, string text)
        {
            if (textComponent == null)
            {
                return;
            }
            textComponent.text = text;
        }

        /// <summary>Sets the label's text from an int; silently does nothing when the Text is null.</summary>
        public static void SetText(this Text textComponent, int value)
        {
            if (textComponent == null)
            {
                return;
            }
            textComponent.text = value.ToString();
        }
    }
}
<file_sep>/Assets/Scripts/Tools/Result.cs
using System;
namespace Tools
{
    /// <summary>
    /// A single score entry: when it was achieved and how many points.
    /// Serializable so Unity can persist it inside the TopScore asset.
    /// </summary>
    [Serializable]
    public struct Result
    {
        // Timestamp of the run, stored pre-formatted as text.
        public string DateTime;
        // Points achieved in that run.
        public int Score;
    }
}
<file_sep>/Assets/Scripts/FlagAnimatorScript.cs
using UnityEngine;
using Tools;
/// <summary>
/// Fakes a waving flag by toggling between two flag sprites every quarter
/// second while this component is enabled.
/// </summary>
public class FlagAnimatorScript : MonoBehaviour
{
    public GameObject FirstFlag;
    public GameObject SecondFlag;

    private void OnEnable()
    {
        FirstFlag.WarnIfReferenceIsNull(gameObject);
        SecondFlag.WarnIfReferenceIsNull(gameObject);
        // Start from a known frame: first sprite shown, second hidden.
        if (FirstFlag != null)
        {
            FirstFlag.SetActive(true);
        }
        if (SecondFlag != null)
        {
            SecondFlag.SetActive(false);
        }
        // Flip frames every 0.25 s, starting immediately.
        InvokeRepeating("Animate", 0, 0.25f);
    }

    private void OnDisable()
    {
        // Stop the repeating toggle when the object is disabled.
        CancelInvoke("Animate");
    }

    // Invoked via InvokeRepeating: swap which sprite is visible.
    private void Animate()
    {
        ToggleFlag(FirstFlag);
        ToggleFlag(SecondFlag);
    }

    // Null-safe active-state flip for one flag sprite.
    private static void ToggleFlag(GameObject flag)
    {
        if (flag != null)
        {
            flag.SetActive(!flag.activeSelf);
        }
    }
}
| 222fb0ee9148d32f4ce5cb95d6dae7a2a1477990 | [
"C#"
] | 7 | C# | DidkowSky/OneTapGolf | ea9cff7475105d0261eddd0e217272ff96f1efe0 | af26a4f2ef625158db4d6e57e83113c462b69da2 |
refs/heads/master | <repo_name>kiril-kostadinov-tues/AmexChallenge<file_sep>/src/CardMerchant.java
public class CardMerchant {
}
| f6c6eecda2b2de1775a98d2f3fae7ac069295a0c | [
"Java"
] | 1 | Java | kiril-kostadinov-tues/AmexChallenge | d5b95ccb38370b58a2c6c4ea9d594397d15b33be | 5d0923459f8b7359899771b55a42d29dc89884cd |
refs/heads/master | <repo_name>ggartzia/coursera-test<file_sep>/module3-solution/js/script.js
$('.nav a').on('click', function() {
//$('.navbar-toggle').click();
}); | bae8784a1ec212bad58c5ce6d35d0093d7026a7e | [
"JavaScript"
] | 1 | JavaScript | ggartzia/coursera-test | 733d52cc6be08558030f644fadeae9412eecd687 | 74b978bb3bd091dee2564d291f0ea74da32fdaab |
refs/heads/master | <repo_name>jbpratt/gomobile-android<file_sep>/sample/sample.go
// Package sample exposes the Go API that gomobile binds into the Android AAR.
package sample
type Bridge struct{}
<file_sep>/Dockerfile
# Build image for compiling Go packages into Android AAR libraries with gomobile.
FROM ubuntu:20.04
WORKDIR /root
# Suppress interactive prompts from apt during the image build.
ARG DEBIAN_FRONTEND=noninteractive
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
# Base toolchain + JDK 8, required by the Android SDK tooling below.
RUN apt-get -qqy update && \
    apt-get -qqy --no-install-recommends install \
    software-properties-common build-essential \
    wget git unzip openjdk-8-jdk -qqy && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/*
ENV JAVA_HOME="/usr/lib/jvm/java-8-openjdk-amd64/jre" \
    PATH=$PATH:$JAVA_HOME/bin
# Pinned Android SDK/tooling versions (overridable at build time).
ARG SDK_VERSION=sdk-tools-linux-3859397
ARG ANDROID_BUILD_TOOLS_VERSION=27.0.3
ARG ANDROID_PLATFORM_VERSION="android-28"
ENV SDK_VERSION=$SDK_VERSION \
    ANDROID_BUILD_TOOLS_VERSION=$ANDROID_BUILD_TOOLS_VERSION \
    ANDROID_HOME=/root
# Fetch and unpack the Android command-line SDK tools into ANDROID_HOME.
RUN wget -O tools.zip https://dl.google.com/android/repository/${SDK_VERSION}.zip && \
    unzip tools.zip && rm tools.zip && \
    chmod a+x -R $ANDROID_HOME && \
    chown -R root:root $ANDROID_HOME
ENV PATH=$PATH:$ANDROID_HOME/tools:$ANDROID_HOME/tools/bin
# https://askubuntu.com/questions/885658/android-sdk-repositories-cfg-could-not-be-loaded
RUN mkdir -p ~/.android && \
    touch ~/.android/repositories.cfg && \
    echo y | sdkmanager "platform-tools" && \
    echo y | sdkmanager "build-tools;$ANDROID_BUILD_TOOLS_VERSION" && \
    echo y | sdkmanager "platforms;$ANDROID_PLATFORM_VERSION" && \
    echo y | sdkmanager "ndk-bundle"
ENV PATH=$PATH:$ANDROID_HOME/platform-tools:$ANDROID_HOME/build-tools
# Recent Go toolchain from the longsleep backports PPA.
RUN add-apt-repository ppa:longsleep/golang-backports && \
    apt-get update && \
    apt-get -qqy --no-install-recommends install golang-go && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/*
ENV PATH /usr/local/go/bin:$PATH
ENV GOPATH /go
ENV PATH $GOPATH/bin:$PATH
RUN mkdir -p "$GOPATH/src" "$GOPATH/bin" && chmod -R 777 "$GOPATH"
WORKDIR $GOPATH
# NOTE(review): `go get` for installing tools is deprecated in modern Go
# (`go install pkg@latest`); fine for the Go version installed above.
RUN go get golang.org/x/mobile/cmd/gomobile
RUN gomobile version
<file_sep>/README.md
```
docker run -v $(pwd):/go/src/x/ --rm \
docker.pkg.github.com/jbpratt/gomobile-android/gomobile-android:latest /bin/bash -c \
"cd /go/src/x && gomobile init && gomobile bind -o ./sample.aar -target=android ./sample"
```
| d5a3a73d8a0d1e8f2b7217edcbff1dd42c8ead9f | [
"Markdown",
"Go",
"Dockerfile"
] | 3 | Go | jbpratt/gomobile-android | 6553d2429409893240b8bc5cace5962cb26b934d | c01af26680a297d54e13c34d83a7bc71294fd30c |
refs/heads/master | <repo_name>YannPaulmier/BotlyStudio-App<file_sep>/builder/sketch/sketch.ino
#include <Botly.h>

// Drawing robot driven by the Botly library (SCOTT_V4 board revision).
Botly robot(SCOTT_V4);

// Configure the motion speed, then initialise the robot hardware.
void setup() {
  robot.setSpeed(35);
  robot.init();
}

// Repeatedly trace a square: move forward 10 units, then turn 90° left.
void loop() {
  robot.avancer(10);
  robot.tournerGauche(90);
}<file_sep>/builder/build/sketch/sketch.ino.cpp
// AUTO-GENERATED by arduino-builder from sketch.ino — do not edit by hand.
// The builder inserts forward declarations for setup()/loop() and #line
// directives so compiler diagnostics map back to the original sketch.
#include <Arduino.h>
#line 1 "C:\\Users\\jtopart\\Documents\\GitHub\\Releases\\BotlyStudio\\resources\\app\\builder\\sketch\\sketch.ino"
#line 1 "C:\\Users\\jtopart\\Documents\\GitHub\\Releases\\BotlyStudio\\resources\\app\\builder\\sketch\\sketch.ino"
#include <Botly.h>
Botly robot(SCOTT_V4);
#line 5 "C:\\Users\\jtopart\\Documents\\GitHub\\Releases\\BotlyStudio\\resources\\app\\builder\\sketch\\sketch.ino"
void setup();
#line 10 "C:\\Users\\jtopart\\Documents\\GitHub\\Releases\\BotlyStudio\\resources\\app\\builder\\sketch\\sketch.ino"
void loop();
#line 5 "C:\\Users\\jtopart\\Documents\\GitHub\\Releases\\BotlyStudio\\resources\\app\\builder\\sketch\\sketch.ino"
void setup() {
  robot.setSpeed(35);
  robot.init();
}

void loop() {
  robot.avancer(10);
  robot.tournerGauche(90);
}
<file_sep>/main.js
//Ardublockly
const electron = require('electron')
// Module to control application life.
const app = electron.app
// Module to create native browser window.
const BrowserWindow = electron.BrowserWindow
const path = require('path');
const url = require('url');
// Filesystem access used by the IPC handlers and Setting/Builder helpers.
// Declared with `var` so the legacy `fs = require('fs')` re-assignments in
// the Setting helpers keep working.
var fs = require('fs');
const ipc = electron.ipcMain;
const root = app.getAppPath();
// Keep a global reference to the window so it is not garbage-collected.
let mainWindow

// Creates the main BrowserWindow, loads the UI, repairs the settings file
// and registers all IPC handlers. Must run after app emits 'ready'.
function createWindow() {
    // Create the browser window.
    mainWindow = new BrowserWindow({ width: 1280, height: 720, frame: true });

    // and load the index.html of the app.
    mainWindow.loadURL(url.format({
        pathname: path.join(__dirname, 'index.html'),
        protocol: 'file:',
        slashes: true
    }))

    // Drop the reference when closed so the window can be collected.
    mainWindow.on('closed', function () {
        mainWindow = null
    })

    Setting.repairFile();
    initIpc();
}
// Registers every renderer->main IPC channel: settings (compiler path,
// serial port, robot model), sketch saving, and the compile/flash/IDE
// actions delegated to Builder.
function initIpc() {
    // Ask the user for the compiler binary, persist it, echo the new value.
    // NOTE(review): `compilerLocation` is assigned without var/const —
    // implicit global; also showOpenDialog returns a Promise in modern
    // Electron, so this relies on the older synchronous API — confirm.
    ipc.on('set-compiler', function (event) {
        const { dialog } = require('electron');
        compilerLocation = dialog.showOpenDialog({ properties: ['openFile'] });
        Setting.setCompiler(compilerLocation[0]);
        var jsonResponse = { element: "text_input", display_text: Setting.getCompiler() };
        event.sender.send('compiler-request-response', JSON.stringify(jsonResponse));
    });
    // Report the currently-configured compiler path.
    ipc.on('compiler-request', function (event) {
        var jsonResponse = { element: "text_input", display_text: Setting.getCompiler() };
        event.sender.send('compiler-request-response', JSON.stringify(jsonResponse));
    });
    // List available serial ports (async via Serial.getPorts callback).
    // NOTE(review): `callback` is an implicit global here and below.
    ipc.on('serial-port-request', function (event) {
        callback = function (jsonResponse) {
            event.sender.send('serial-port-request-response', JSON.stringify(jsonResponse));
        }
        Serial.getPorts(callback);
    });
    // Persist the chosen serial port, then refresh the port list.
    ipc.on('set-serial-port', function (event, port) {
        Setting.setSerialPort(port);
        callback = function (jsonResponse) {
            event.sender.send('serial-port-request-response', JSON.stringify(jsonResponse));
        }
        Serial.getPorts(callback);
    });
    // Report the currently-configured robot model.
    ipc.on('robot-request', function (event) {
        var jsonResponse = { element: "text_input", display_text: Setting.getRobot() };
        event.sender.send('robot-request-response', JSON.stringify(jsonResponse));
    });
    // Persist the chosen robot model and echo it back.
    ipc.on('set-robot', function (event, robot) {
        Setting.setRobot(robot);
        var jsonResponse = { element: "text_input", display_text: Setting.getRobot() };
        event.sender.send('robot-request-response', JSON.stringify(jsonResponse));
    });
    // Write the generated Arduino code to the sketch file.
    ipc.on('code', function (event, arg) {
        var fs = require('fs');
        try { fs.writeFileSync(app.getAppPath() + '/builder/sketch/sketch.ino', arg, 'utf-8'); }
        catch (e) { console.log('Failed to save the file !'); }
    });
    // Compile the saved sketch (only if it exists).
    ipc.on('compile', function (event, method) {
        if (fs.existsSync(app.getAppPath() + "/builder/sketch/sketch.ino")) {
            Builder.compile(event, method);
        }
    });
    // Flash the compiled hex to the board (only if a build exists).
    ipc.on('flash', function (event) {
        if (fs.existsSync(app.getAppPath() + "/builder/build/sketch.ino.hex")) {
            Builder.flash(event);
        }
    });
    // Open the sketch in the external Arduino IDE.
    ipc.on('openIDE', function (event) {
        if (fs.existsSync(app.getAppPath() + "/builder/sketch/sketch.ino")) {
            Builder.open();
        }
    });
}
// Build the window once Electron has finished initialising.
app.on('ready', createWindow)

// Quit when all windows are closed — except on macOS, where apps
// conventionally stay active until the user quits explicitly.
app.on('window-all-closed', function () {
    if (process.platform !== 'darwin') {
        app.quit()
    }
})

// macOS: re-create the window when the dock icon is clicked and no
// window is currently open.
app.on('activate', function () {
    if (mainWindow === null) {
        createWindow()
    }
})
/************************************************
*
*
* Setting
*
*
*
*************************************************
*/
// Namespace for persisting user settings in <appPath>/setting.json.
var Setting = {};

// Persists the configured compiler path. Returns false when no path is
// given; true once the setting has been written. (Guard-clause first so
// a null argument no longer triggers a needless settings-file read.)
Setting.setCompiler = function (compiler) {
    if (compiler == null) {
        return false;
    }
    var jsonSetting = Setting.readSetting();
    jsonSetting.compiler = compiler;
    Setting.saveSetting(jsonSetting);
    return true;
}
// Reads the stored compiler path from setting.json.
Setting.getCompiler = function () {
    var settings = Setting.readSetting();
    return settings.compiler;
}
// Persists the configured robot identifier. Returns false when no value
// is given; true once saved. (Guard-clause first so a null argument no
// longer triggers a needless settings-file read.)
Setting.setRobot = function (robot) {
    if (robot == null) {
        return false;
    }
    var jsonSetting = Setting.readSetting();
    jsonSetting.robot = robot;
    Setting.saveSetting(jsonSetting);
    return true;
}
// Read the persisted robot model from the settings file.
// Fixed: the original parsed the settings file twice (once for a debug log via
// `this.readSetting()`, once for the return value); `this` also breaks when the
// function is detached from Setting. We now read once and reference Setting directly.
Setting.getRobot = function () {
    var robot = Setting.readSetting().robot;
    console.log(robot);
    return robot;
}
// Persist the serial port name. Returns false when no value was given.
// Fixed: `jsonSetting` was assigned without `var`, leaking an implicit global
// (and a ReferenceError under strict mode).
Setting.setSerialPort = function (port) {
    var jsonSetting = Setting.readSetting();
    if (port != null) {
        jsonSetting.serialport = port;
        Setting.saveSetting(jsonSetting);
    } else {
        return false;
    }
    return true;
}
// Read the persisted serial port name from the settings file.
Setting.getSerialPort = function () {
    var settings = Setting.readSetting();
    return settings.serialport;
}
// Rewrite setting.json from its parsed content; since readSetting falls back to
// defaults on unreadable content, this restores a well-formed file.
Setting.repairFile = function () {
    var settings = Setting.readSetting();
    Setting.saveSetting(settings);
}
// Read and parse setting.json from the application directory; returns a
// settings object (defaults when the file is missing/corrupt, see parseToJson).
// Fixed: fs/spath/content/json were assigned without `var`, leaking globals.
Setting.readSetting = function () {
    var fs = require('fs');
    var spath = app.getAppPath() + "/setting.json";
    var content = fs.readFileSync(spath, 'utf-8');
    return Setting.parseToJson(content);
}
// Serialize the given settings object (pretty-printed) and write it to setting.json.
// Fixed: fs/spath/setting were assigned without `var`, leaking globals.
Setting.saveSetting = function (jsonSetting) {
    var fs = require('fs');
    var spath = app.getAppPath() + "/setting.json";
    var setting = JSON.stringify(jsonSetting, undefined, 2);
    fs.writeFileSync(spath, setting);
}
// Parse raw settings-file content; fall back to sane defaults when the content
// is missing, empty, or not valid JSON.
// Fixed: jsonSetting was assigned without `var`, leaking an implicit global.
Setting.parseToJson = function (data) {
    var jsonSetting = null;
    try { jsonSetting = JSON.parse(data); }
    catch (e) {
        console.log(e);
    }
    if (jsonSetting == null) {
        jsonSetting = { compiler: "", serialport: "", robot: "Botly" };
    }
    return jsonSetting;
}
/************************************************
*
*
* Builder
*
*
*
*************************************************
*/
// Namespace object wrapping the bundled arduino-builder tool chain.
var Builder = {};
// Path of the bundled arduino-builder executable (Windows build, per the .exe suffix).
const executablePath = app.getAppPath() + "/builder/arduino-builder.exe";
// Compile builder/sketch/sketch.ino with the bundled arduino-builder and send
// the build output back to the renderer on the 'compile-response' channel.
// `method` is echoed back unchanged so the renderer can correlate the request.
// Fixed: compilerPath/compilerFlag were assigned without `var` (implicit
// globals) and the no-op `var method = method;` was removed.
Builder.compile = function (event, method) {
    var compilerPath = executablePath;
    // Robot "2" targets an Uno board; every other robot targets the LilyPadUSB.
    var compilerFlag;
    if (Setting.getRobot() == "2") compilerFlag = "avr:uno";
    else compilerFlag = "avr:LilyPadUSB";
    var basepath = app.getAppPath();
    var child = require('child_process').execFile;
    var parameters = ["-compile",
        "-verbose=false",
        "-hardware=" + basepath + "/builder/hardware",
        "-build-path=" + basepath + "/builder/build",
        "-tools=" + basepath + "/builder/hardware/tools/avr",
        "-tools=" + basepath + "/builder/tools-builder",
        "-libraries=" + basepath + "/builder/libraries",
        "-fqbn=arduino:" + compilerFlag,
        "" + basepath + "/builder/sketch/sketch.ino"];
    child(compilerPath, parameters, function (err, data) {
        console.log(err)
        console.log(data.toString());
        var jsonResponse = {};
        if (err) {
            jsonResponse = { "element": "div_ide_output", "output": err, "success": "false", "method": method };
        } else {
            jsonResponse = { "element": "div_ide_output", "output": data.toString(), "success": "true", "method": method };
        }
        event.sender.send('compile-response', JSON.stringify(jsonResponse));
    });
}
// Open the generated sketch in the user's configured external Arduino IDE.
// Does nothing when the compiler setting is "Default" (no external IDE set).
// Fixed: compiler/compilerPath were assigned without `var`, leaking globals.
Builder.open = function () {
    var compiler = Setting.getCompiler();
    if (compiler != "Default") {
        var compilerPath = compiler;
        var basepath = app.getAppPath();
        var child = require('child_process').execFile;
        var parameters = [basepath + "/builder/sketch/sketch.ino"];
        child(compilerPath, parameters, function (err, data) {
            console.log(err)
            console.log(data.toString());
        });
    }
}
// Flash the compiled hex image onto the robot's board over the configured
// serial port, then report the result on the 'upload-response' channel.
// Fixed: the catch block referenced an undefined `error` variable, so any
// synchronous failure raised a secondary ReferenceError instead of reporting
// the caught exception; it now reports `e`.
Builder.flash = function (event) {
    var Avrgirl = require('avrgirl-arduino');
    // Robot "2" is an Uno board; every other robot is flashed as a LilyPadUSB.
    var boardName = "lilypad-usb";
    if (Setting.getRobot() == 2) boardName = "uno";
    try {
        var avrgirl = new Avrgirl({
            board: boardName,
            port: Setting.getSerialPort(),
            debug: true
        });
        avrgirl.flash(app.getAppPath() + '/builder/build/sketch.ino.hex', function (error) {
            var jsonResponse = {};
            if (error) {
                console.error(error);
                jsonResponse = { "element": "div_ide_output", "output": error, "success": "false" };
            } else {
                console.info('done.');
                jsonResponse = { "element": "div_ide_output", "output": "Téléversement terminé", "success": "true" };
            }
            event.sender.send('upload-response', JSON.stringify(jsonResponse));
        });
    } catch (e) {
        console.log(e);
        var jsonResponse = { "element": "div_ide_output", "output": e, "success": "false" };
        event.sender.send('upload-response', JSON.stringify(jsonResponse));
    }
}
/************************************************
*
*
* Serial
*
*
*
*************************************************
*/
// Namespace object for serial-port discovery helpers.
var Serial = {};

// List the available serial ports and hand a ready-to-render JSON payload to
// `callback`. The previously saved port wins auto-selection; otherwise the
// first FTDI device (vendor/product heuristics) is auto-selected and labelled
// as the Botly robot.
// Fixed: SerialPort/autoselect/serial/result were assigned without `var`,
// leaking globals that could race between concurrent invocations.
Serial.getPorts = function (callback) {
    var SerialPort = require('serialport');
    var autoselect = null;
    var serial = [];
    SerialPort.list(function (err, ports) {
        ports.forEach(function (port) {
            console.log(port);
            if (Setting.getSerialPort() == port.comName) {
                autoselect = port.comName;
                serial.push({ "value": port.comName, "display_text": port.comName });
            } else if (autoselect == null && (port.manufacturer == "FTDI" || port.productId == "6001")) {
                autoselect = port.comName;
                serial.push({ "value": port.comName, "display_text": port.comName + ' Botly' });
            } else {
                serial.push({ "value": port.comName, "display_text": port.comName });
            }
        });
        var result = { 'autoselect': autoselect, 'ports': serial };
        callback(Serial.parseResponse(result));
    });
}
Serial.parseResponse = function (portList) {
//console.log("Port:");
//console.log(portList.ports);
jsonResponse = {
"selected": "",
"element": "dropdown",
"response_type": "json",
"options": portList.ports
};
if (portList.autoselect != null) {
jsonResponse.selected = portList.autoselect;
} else if (portList.ports.length > 0) {
jsonResponse.selected = portList.ports[0].value;
//jsonResponse.options = [{ display_text: portList.ports[0].value, value: portList.ports[0].value }];
} else {
jsonResponse.selected = "No Serial port"
jsonResponse.options = [{ display_text: "No serial port", value: "" }];
}
return jsonResponse;
}<file_sep>/builder/build/preproc/ctags_target_for_gcc_minus_e.cpp
# 1 "C:\\Users\\jtopart\\Documents\\GitHub\\Releases\\BotlyStudio\\resources\\app\\builder\\sketch\\sketch.ino"
# 1 "C:\\Users\\jtopart\\Documents\\GitHub\\Releases\\BotlyStudio\\resources\\app\\builder\\sketch\\sketch.ino"
# 2 "C:\\Users\\jtopart\\Documents\\GitHub\\Releases\\BotlyStudio\\resources\\app\\builder\\sketch\\sketch.ino" 2
// NOTE: this file is arduino-builder preprocessor output for sketch.ino —
// a generated build artifact, not hand-maintained source.

// Global robot driver instance; the argument selects the base Botly hardware version.
Botly robot(1 /* base version of Botly */);
// Arduino entry point: set the speed (units defined by the Botly library),
// then initialise the robot hardware.
void setup() {
robot.setSpeed(35);
robot.init();
}
// Arduino main loop: repeatedly advance 10 units then turn left 90 degrees
// (avancer/tournerGauche are French for advance/turn-left — semantics defined
// by the Botly library; presumably this traces a square path — TODO confirm).
void loop() {
robot.avancer(10);
robot.tournerGauche(90);
}
| 4cad4e3266c57ba9299733548e227068d359894c | [
"JavaScript",
"C++"
] | 4 | C++ | YannPaulmier/BotlyStudio-App | fb17c2b916762301e684f5c6e35274f63d5c77fb | 16565e2d26ba1713844e53ce9678b708e99816c2 |
refs/heads/master | <repo_name>abogoslov/annuity-loan<file_sep>/src/main/resources/logging.properties
# java.util.logging configuration: log everything to a rotating file.
handlers = java.util.logging.FileHandler
# Root logger level: capture all messages.
.level=ALL
java.util.logging.FileHandler.level = ALL
# Log file name pattern (relative to the working directory).
java.util.logging.FileHandler.pattern = money_loan_log.txt
# Rotate after ~1 MB, keeping at most 5 files.
java.util.logging.FileHandler.limit = 1000000
java.util.logging.FileHandler.count = 5
java.util.logging.FileHandler.formatter = java.util.logging.SimpleFormatter<file_sep>/src/main/resources/data.sql
-- Schema bootstrap and seed data for the annuity-loan demo database.

-- Clients (borrowers).
DROP TABLE IF EXISTS clients;
CREATE TABLE clients(
id INTEGER NOT NULL AUTO_INCREMENT,
first_name VARCHAR2(50),
surname VARCHAR2(50),
doc_id BIGINT,
PRIMARY KEY (id)
);
-- Seed client (doc_id supplied as a string literal, implicitly converted to BIGINT).
INSERT INTO clients(first_name, surname, doc_id) VALUES ('Антон', 'Богослов', '1234567890');
-- Loan requests; monthly_charge is left unset here — it is computed by the
-- application service layer when a request is created or updated.
DROP TABLE IF EXISTS requests;
CREATE TABLE requests(
id INTEGER NOT NULL AUTO_INCREMENT,
sum DECIMAL(15, 2),
date DATE,
duration INTEGER,
monthly_charge DECIMAL(15, 2),
client_id INTEGER,
PRIMARY KEY (id)
);
INSERT INTO requests(sum, date, duration, client_id) VALUES ('100000','2017-12-06','6', '1');<file_sep>/src/main/java/loan/model/MonthSchedule.java
package loan.model;
import java.math.BigDecimal;
/**
* @author A.Bogoslov
*/
public class MonthSchedule {
private int monthIndex;
private BigDecimal loanBalance;
private BigDecimal primaryPayment;
private BigDecimal percents;
public int setMonthIndex() {
return monthIndex;
}
public int getMonthIndex() {
return monthIndex;
}
public BigDecimal getLoanBalance() {
return loanBalance;
}
public BigDecimal getPrimaryPayment() {
return primaryPayment;
}
public BigDecimal getPercents() {
return percents;
}
public void setMonthIndex(int monthIndex) {
this.monthIndex = monthIndex;
}
public void setLoanBalance(BigDecimal loanBalance) {
this.loanBalance = loanBalance;
}
public void setPrimaryPayment(BigDecimal primaryPayment) {
this.primaryPayment = primaryPayment;
}
public void setPercents(BigDecimal percents) {
this.percents = percents;
}
}
<file_sep>/src/main/java/loan/service/impl/RequestServiceImpl.java
package loan.service.impl;
import com.google.common.collect.Lists;
import loan.db.RequestRepository;
import loan.model.Request;
import loan.service.RequestService;
import loan.util.Utils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;
import org.springframework.stereotype.Service;
import java.util.List;
/**
 * Default {@link RequestService} implementation backed by a Spring Data
 * repository; the annuity monthly charge is derived before every save.
 *
 * @author A.Bogoslov
 */
@Service
@Repository
public class RequestServiceImpl implements RequestService {

    private final RequestRepository repository;
    private final Utils utils;

    @Autowired
    public RequestServiceImpl(RequestRepository repository, Utils utils) {
        this.repository = repository;
        this.utils = utils;
    }

    /** Compute the monthly charge for the new request, then persist it. */
    @Override
    public void createRequest(Request request) {
        persistWithCharge(request);
    }

    /** All stored requests, copied into a mutable list. */
    @Override
    public List<Request> listRequests() {
        return Lists.newArrayList(repository.findAll());
    }

    /** Single request looked up by primary key. */
    @Override
    public Request getRequest(int id) {
        return repository.findOne(id);
    }

    /** Recompute the monthly charge for the request, then persist it. */
    @Override
    public void updateRequest(Request request) {
        persistWithCharge(request);
    }

    /** Remove the request with the given primary key. */
    @Override
    public void removeRequest(int id) {
        repository.delete(id);
    }

    // Shared create/update path: derive the annuity monthly charge before saving.
    private void persistWithCharge(Request request) {
        request.setMonthlyCharge(utils.calcMonthlyCharge(request));
        repository.save(request);
    }
}
<file_sep>/README.md
# annuity-loan
_Web-app calculating payments for annuity loan scheme_<br/>
<br/>
**CRUD** operations for loan requests<br>
<br/>
<br/>
**P.S.**<br/>
Spring Boot (MVC + Data + Test, Thymeleaf)<br/>
JQuery<br/>
Bootstrap<br/>
JUnit, Mockito<br/><file_sep>/src/test/java/loan/mvc/MvcTest.java
package loan.mvc;
import loan.service.ClientService;
import loan.service.RequestService;
import loan.util.Utils;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.http.MediaType;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
/**
 * Web-layer (MockMvc) smoke tests for {@code MainController}: the service
 * beans are mocked out, so only request routing and the HTTP status code are
 * exercised.
 *
 * @author A.Bogoslov
 */
@RunWith(SpringRunner.class)
@WebMvcTest(MainController.class)
public class MvcTest {

    @Autowired
    private MockMvc mvc;

    // Collaborators of MainController, mocked so the web slice can start without them.
    @MockBean
    private RequestService requestService;

    @MockBean
    private ClientService clientService;

    @MockBean
    private Utils utils;

    /** The root page must answer 200 OK for both POST and GET. */
    @Test
    public void testIndex() throws Exception {
        mvc.perform(post("/")
                .accept(MediaType.TEXT_PLAIN))
                .andExpect(status().isOk());
        mvc.perform(get("/")
                .accept(MediaType.TEXT_PLAIN))
                .andExpect(status().isOk());
    }

    /** The /success page must answer 200 OK for both GET and POST. */
    @Test
    public void testSuccess() throws Exception {
        mvc.perform(get("/success")
                .accept(MediaType.TEXT_PLAIN))
                .andExpect(status().isOk());
        mvc.perform(post("/success")
                .accept(MediaType.TEXT_PLAIN))
                .andExpect(status().isOk());
    }
}
| fd6d49ae8f5ac2eb72a91bfc4d7c29dd1275197a | [
"Java",
"SQL",
"Markdown",
"INI"
] | 6 | INI | abogoslov/annuity-loan | 66b83b5e3d3853bc8b44ce869165653f1f3195ef | 2406e868d2a04b6f778de074eaaadaf2aedf03c8 |
refs/heads/master | <file_sep>package com.ysotek.support.business.dao;
import com.ysotek.support.domain.User;
import java.util.List;
import java.util.Map;
/**
 * DAO layer for {@link User} entities.
 * <p>
 * Created by XLP on 2016/3/31.
 */
public interface UserDao {

    // ******************************************************************************
    // ********************************* CRUD START *********************************
    // ******************************************************************************

    /**
     * Total number of records matching the given filter map.
     */
    public Long totalRecord(Map<String, String> queryHash);

    /**
     * Paged list (pageNow is 1-based); sqlOrder is an optional order-by clause
     * and queryHash carries the filter values — see the implementation for
     * supported keys.
     */
    public List<User> findList(Integer pageNow, Integer pageSize, String sqlOrder, Map<String, String> queryHash);

    /**
     * Fetch one record by id.
     */
    public User findModel(Long userId);

    /**
     * Insert a record.
     */
    public Integer add(User model);

    /**
     * Update a record.
     */
    public Integer update(User model);

    /**
     * Delete a record by id.
     */
    public Integer delete(Long userId);

    // ******************************************************************************
    // ********************************** CRUD END **********************************
    // ******************************************************************************
}
<file_sep>package com.ysotek.support.business.dao.impl;
import com.vt1314.base.sugar.data.QueryUtils;
import com.ysotek.support.business.dao.ProjectDao;
import com.ysotek.support.domain.Project;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Repository;
import org.springframework.util.StringUtils;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.TypedQuery;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * JPA DAO for {@link Project} entities.
 * <p>
 * Created by zavier.zhang on 10/28/2015.
 */
@Repository("projectDao")
public class ProjectDaoImpl implements ProjectDao {

    private final static Logger logger = LoggerFactory.getLogger(ProjectDaoImpl.class);

    @PersistenceContext
    private EntityManager entityManager;

    // ******************************************************************************
    // ********************************* CRUD START *********************************
    // ******************************************************************************

    /**
     * Translate the string filter map into JPQL condition fragments
     * ("fragment with ?{paramIndex} placeholder" -> bound value).
     * No Project filters are currently implemented, so the map stays empty.
     */
    public Map<String, Object> getSearchCondition(Map<String, String> queryHash) {
        Map<String, Object> conditionHash = new HashMap<>();
        if (queryHash == null) {
            return conditionHash;
        }
        /* Template for adding filters, e.g.:
        String name = queryHash.get("projectName");
        if (!StringUtils.isEmpty(name)) {
            conditionHash.put("projectName like ?{paramIndex} ", "%" + name + "%");
        }*/
        return conditionHash;
    }

    /**
     * Total number of Project records matching the filter map.
     * Fixed: the count query previously read "select count(c) from CustomerUser c"
     * (copy-paste error), so it counted the wrong entity entirely.
     */
    @Override
    public Long totalRecord(Map<String, String> queryHash) {
        Map<String, Object> conditions = getSearchCondition(queryHash);
        TypedQuery<Long> typedQuery = QueryUtils.getTypedQueryByCondition("select count(p) from Project p ", conditions, "", entityManager, Long.class);
        return typedQuery.getSingleResult();
    }

    /**
     * Paged list of Project records, newest (highest id) first unless a custom
     * order clause is supplied. pageNow is 1-based; paging is applied only when
     * both pageNow and pageSize are positive.
     */
    @Override
    public List<Project> findList(Integer pageNow, Integer pageSize, String sqlOrder, Map<String, String> queryHash) {
        if (StringUtils.isEmpty(sqlOrder)) {
            sqlOrder = "order by p.projectId desc ";
        }
        Map<String, Object> conditions = getSearchCondition(queryHash);
        TypedQuery<Project> typedQuery = QueryUtils.getTypedQueryByCondition("select p from Project p ", conditions, sqlOrder, entityManager, Project.class);
        // Apply paging only when explicitly requested.
        if (pageSize > 0 && pageNow > 0) {
            logger.debug("提交了分页查询信息,pageNow为[" + pageNow + "],pageSize为[" + pageSize + "]");
            int minLimit = pageSize * (pageNow - 1);
            int maxLimit = pageSize;
            typedQuery.setFirstResult(minLimit).setMaxResults(maxLimit);
        }
        return typedQuery.getResultList();
    }

    /** Fetch one record by primary key (null when absent). */
    @Override
    public Project findModel(Long projectId) {
        return entityManager.find(Project.class, projectId);
    }

    /** Insert a new record, stamping its creation date. Always returns 1. */
    @Override
    public Integer add(Project model) {
        model.setCreateDate(new Date());
        entityManager.persist(model);
        logger.info("ProjectDaoImpl添加Project成功!");
        return 1;
    }

    /**
     * Copy the editable fields onto the managed entity; the change is flushed
     * by JPA dirty checking at transaction commit (no explicit merge).
     * NOTE(review): throws NullPointerException when the id does not exist —
     * confirm callers guarantee existence.
     */
    @Override
    public Integer update(Project model) {
        Project existentProject = entityManager.find(Project.class, model.getProjectId());
        existentProject.setProjectName(model.getProjectName());
        existentProject.setProjectDescription(model.getProjectDescription());
        existentProject.setUpdateDate(new Date());
        return 1;
    }

    /** Delete the record with the given primary key. Always returns 1. */
    @Override
    public Integer delete(Long projectId) {
        Project existProject = entityManager.find(Project.class, projectId);
        entityManager.remove(existProject);
        return 1;
    }

    // ******************************************************************************
    // ********************************** CRUD END **********************************
    // ******************************************************************************
}<file_sep>package com.ysotek.support.business.dao.impl;
import com.vt1314.base.sugar.data.QueryUtils;
import com.ysotek.support.business.dao.CustomerUserDao;
import com.ysotek.support.domain.CustomerUser;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Repository;
import org.springframework.util.StringUtils;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.TypedQuery;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * JPA DAO for {@link CustomerUser} entities.
 * <p>
 * Created by zavier.zhang on 10/28/2015.
 */
@Repository("customerUserDao")
public class CustomerUserDaoImpl implements CustomerUserDao {

    private final static Logger logger = LoggerFactory.getLogger(CustomerUserDaoImpl.class);

    @PersistenceContext
    private EntityManager entityManager;

    // ******************************************************************************
    // ********************************* CRUD START *********************************
    // ******************************************************************************

    /**
     * Translate the string filter map into JPQL condition fragments
     * ("fragment with ?{paramIndex} placeholder" -> bound value).
     * Supported keys: loginName, loginPassword (both matched by equality).
     */
    public Map<String, Object> getSearchCondition(Map<String, String> queryHash) {
        Map<String, Object> conditionHash = new HashMap<>();
        if (queryHash == null) {
            return conditionHash;
        }
        String loginName = queryHash.get("loginName");
        if (!StringUtils.isEmpty(loginName)) {
            conditionHash.put("loginName = ?{paramIndex} ", loginName);
        }
        String loginPassword = queryHash.get("loginPassword");
        if (!StringUtils.isEmpty(loginPassword)) {
            conditionHash.put("loginPassword = ?{paramIndex} ", loginPassword);
        }
        /* Template for adding more filters:
        String String = queryHash.get("String");
        if (!StringUtils.isEmpty(String)) {
            conditionHash.put("String like ?{paramIndex} ", "%" + String + "%");
        }
        Integer Integer = TypeConvertUtils.StringToInteger(queryHash.get("Integer"));
        if (Integer != null && Integer > -1) {
            conditionHash.put("Integer = ?{paramIndex} ", Integer);
        }
        Date Date = TypeConvertUtils.StringToDate(queryHash.get("Date"));
        if (Date != null) {
            conditionHash.put("Date >= ?{paramIndex} ", Date);
        }*/
        return conditionHash;
    }

    /** Total number of CustomerUser records matching the filter map. */
    @Override
    public Long totalRecord(Map<String, String> queryHash) {
        Map<String, Object> conditions = getSearchCondition(queryHash);
        TypedQuery<Long> typedQuery = QueryUtils.getTypedQueryByCondition("select count(c) from CustomerUser c ", conditions, "", entityManager, Long.class);
        return typedQuery.getSingleResult();
    }

    /**
     * Paged list of CustomerUser records, newest (highest id) first unless a
     * custom order clause is supplied. pageNow is 1-based; paging is applied
     * only when both pageNow and pageSize are positive.
     */
    @Override
    public List<CustomerUser> findList(Integer pageNow, Integer pageSize, String sqlOrder, Map<String, String> queryHash) {
        if (StringUtils.isEmpty(sqlOrder)) {
            sqlOrder = "order by c.customerUserId desc ";
        }
        Map<String, Object> conditions = getSearchCondition(queryHash);
        TypedQuery<CustomerUser> typedQuery = QueryUtils.getTypedQueryByCondition("select c from CustomerUser c ", conditions, sqlOrder, entityManager, CustomerUser.class);
        // Apply paging only when explicitly requested.
        if (pageSize > 0 && pageNow > 0) {
            logger.debug("提交了分页查询信息,pageNow为[" + pageNow + "],pageSize为[" + pageSize + "]");
            int minLimit = pageSize * (pageNow - 1);
            int maxLimit = pageSize;
            typedQuery.setFirstResult(minLimit).setMaxResults(maxLimit);
        }
        // Return the query result.
        return typedQuery.getResultList();
    }

    /** Fetch one record by primary key (null when absent). */
    @Override
    public CustomerUser findModel(Long customerUserId) {
        return entityManager.find(CustomerUser.class, customerUserId);
    }

    /** Insert a new record, stamping its creation date. Always returns 1. */
    @Override
    public Integer add(CustomerUser model) {
        model.setCreateDate(new Date());
        entityManager.persist(model);
        logger.info("CustomerUserDaoImpl添加CustomerUser成功!");
        return 1;
    }

    /**
     * Copy the editable fields onto the managed entity; the change is flushed
     * by JPA dirty checking at transaction commit (no explicit merge).
     * NOTE(review): throws NullPointerException when the id does not exist —
     * confirm callers guarantee existence.
     */
    @Override
    public Integer update(CustomerUser model) {
        CustomerUser existentUser = entityManager.find(CustomerUser.class, model.getCustomerUserId());
        existentUser.setLoginName(model.getLoginName());
        existentUser.setLoginPassword(model.getLoginPassword());
        existentUser.setIcon(model.getIcon());
        existentUser.setPhoneNumber(model.getPhoneNumber());
        existentUser.setRealName(model.getRealName());
        existentUser.setProjectSet(model.getProjectSet());
        existentUser.setUpdateDate(new Date());
        return 1;
    }

    /** Delete the record with the given primary key. Always returns 1. */
    @Override
    public Integer delete(Long CustomerUserId) {
        CustomerUser existCustomerUser = entityManager.find(CustomerUser.class, CustomerUserId);
        entityManager.remove(existCustomerUser);
        return 1;
    }

    // ******************************************************************************
    // ********************************** CRUD END **********************************
    // ******************************************************************************
}<file_sep>package com.ysotek.support.business.dao;
import com.ysotek.support.domain.Project;
import java.util.List;
import java.util.Map;
/**
 * DAO layer for {@link Project} entities.
 */
public interface ProjectDao {

    // ******************************************************************************
    // ********************************* CRUD START *********************************
    // ******************************************************************************

    /**
     * Total number of records matching the given filter map.
     */
    public Long totalRecord(Map<String, String> queryHash);

    /**
     * Paged list (pageNow is 1-based); sqlOrder is an optional order-by clause
     * and queryHash carries the filter values.
     */
    public List<Project> findList(Integer pageNow, Integer pageSize, String sqlOrder, Map<String, String> queryHash);

    /**
     * Fetch one record by id.
     */
    public Project findModel(Long projectId);

    /**
     * Insert a record.
     */
    public Integer add(Project model);

    /**
     * Update a record.
     */
    public Integer update(Project model);

    /**
     * Delete a record by id.
     */
    public Integer delete(Long projectId);

    // ******************************************************************************
    // ********************************** CRUD END **********************************
    // ******************************************************************************
}<file_sep>package com.ysotek.support.business.biz;
import com.ysotek.support.domain.CustomerUser;
import com.ysotek.support.domain.Project;
import java.util.List;
import java.util.Map;
/**
 * Business layer for customer-user accounts.
 * <p>
 * Created by zavier.zhang on 10/28/2015.
 */
@SuppressWarnings("unused")
public interface CustomerUserBiz {

    /**
     * Log in.
     *
     * @param agentName login name
     * @param password  password
     */
    CustomerUser login(String agentName, String password);

    // ******************************************************************************
    // ********************************* CRUD START *********************************
    // ******************************************************************************

    /**
     * Total number of records.
     */
    public Long totalRecord();

    /**
     * Total number of records matching the filter map.
     */
    public Long totalRecord(Map<String, String> queryHash);

    /**
     * Unpaged list.
     */
    public List<CustomerUser> findList();

    /**
     * Unpaged list matching the filter map.
     */
    public List<CustomerUser> findList(Map<String, String> queryHash);

    /**
     * Paged list.
     */
    public List<CustomerUser> findList(Integer pageNow, Integer pageSize);

    /**
     * Paged list matching the filter map.
     */
    public List<CustomerUser> findList(Integer pageNow, Integer pageSize, Map<String, String> queryHash);

    /**
     * Paged list with an explicit order clause and filter map.
     */
    public List<CustomerUser> findList(Integer pageNow, Integer pageSize, String sqlOrder, Map<String, String> queryHash);

    /**
     * Fetch one record by id.
     */
    public CustomerUser findModel(Long customerUserId);

    /**
     * Insert or update a record.
     */
    public void addOrUpdate(CustomerUser model);

    /**
     * Delete a record by id.
     */
    public void delete(Long customerUserId);

    // Associate an existing project with the given customer user.
    public Project addProjectToCustomerUser(Long customer_user_Id, Long projectId);

    // ******************************************************************************
    // ********************************** CRUD END **********************************
    // ******************************************************************************
}<file_sep>package com.ysotek.support.business.dao.impl;
import com.vt1314.base.sugar.data.QueryUtils;
import com.ysotek.support.business.dao.MessageDao;
import com.ysotek.support.domain.Message;
import com.ysotek.support.domain.WorkOrder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.StringUtils;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.Query;
import javax.persistence.TypedQuery;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * JPA DAO for {@link Message} entities (messages attached to a work order).
 * <p>
 * Created by zavier.zhang on 10/28/2015.
 */
@Repository("messageDao")
public class MessageDaoImpl implements MessageDao {

    private final static Logger logger = LoggerFactory.getLogger(MessageDaoImpl.class);

    @PersistenceContext
    private EntityManager entityManager;

    // ******************************************************************************
    // ********************************* CRUD START *********************************
    // ******************************************************************************

    /**
     * Translate the string filter map into JPQL condition fragments
     * ("fragment with ?{paramIndex} placeholder" -> bound value).
     * Supported key: workOrderId (numeric id of the owning work order).
     * Fixed: the id was parsed before checking for presence, so a request
     * without a "workOrderId" entry crashed with a NullPointerException from
     * Long.parseLong(null); the value is now parsed only when present.
     */
    public Map<String, Object> getSearchCondition(Map<String, String> queryHash) {
        Map<String, Object> conditionHash = new HashMap<>();
        if (queryHash == null) {
            return conditionHash;
        }
        String workOrderIdParam = queryHash.get("workOrderId");
        if (!StringUtils.isEmpty(workOrderIdParam)) {
            conditionHash.put("belongWorkOrder.workOrderId = ?{paramIndex} ", Long.parseLong(workOrderIdParam));
        }
        return conditionHash;
    }

    /** Bulk-delete all messages belonging to the given work order. */
    @Override
    @Transactional
    public void deleteByWorkOrder(WorkOrder workOrder) {
        Query query = entityManager.createQuery("delete from Message m where m.belongWorkOrder= :workOrder").setParameter("workOrder", workOrder);
        query.executeUpdate();
    }

    /**
     * Ids of all messages belonging to the given work order.
     * NOTE(review): the JPQL path "messageId" is unqualified (should be
     * m.messageId); this relies on Hibernate leniency and is non-portable —
     * left unchanged to avoid behavior change, but worth confirming.
     */
    @Override
    @SuppressWarnings("unchecked")
    public List<Long> selectMessageIdByWorkOrder(WorkOrder workOrder) {
        Query query = entityManager.createQuery("select messageId from Message m where m.belongWorkOrder= :workOrder").setParameter("workOrder", workOrder);
        return query.getResultList();
    }

    /** Total number of Message records matching the filter map. */
    @Override
    public Long totalRecord(Map<String, String> queryHash) {
        Map<String, Object> conditions = getSearchCondition(queryHash);
        TypedQuery<Long> typedQuery = QueryUtils.getTypedQueryByCondition("select count(m) from Message m ", conditions, "", entityManager, Long.class);
        return typedQuery.getSingleResult();
    }

    /**
     * Paged list of messages, oldest (lowest id) first unless a custom order
     * clause is supplied. pageNow is 1-based; paging is applied only when both
     * pageNow and pageSize are positive.
     */
    @Override
    public List<Message> findList(Integer pageNow, Integer pageSize, String sqlOrder, Map<String, String> queryHash) {
        if (StringUtils.isEmpty(sqlOrder)) {
            sqlOrder = "order by m.messageId asc ";
        }
        Map<String, Object> conditions = getSearchCondition(queryHash);
        TypedQuery<Message> typedQuery = QueryUtils.getTypedQueryByCondition("select m from Message m ", conditions, sqlOrder, entityManager, Message.class);
        // Apply paging only when explicitly requested.
        if (pageSize > 0 && pageNow > 0) {
            logger.debug("提交了分页查询信息,pageNow为[" + pageNow + "],pageSize为[" + pageSize + "]");
            int minLimit = pageSize * (pageNow - 1);
            int maxLimit = pageSize;
            typedQuery.setFirstResult(minLimit).setMaxResults(maxLimit);
        }
        return typedQuery.getResultList();
    }

    /** Fetch one record by primary key (null when absent). */
    @Override
    public Message findModel(Long messageId) {
        return entityManager.find(Message.class, messageId);
    }

    /** Insert a new record, stamping its creation date. Always returns 1. */
    @Override
    public Integer add(Message model) {
        model.setCreateDate(new Date());
        entityManager.persist(model);
        logger.info("MessageDaoImpl添加Message成功!");
        return 1;
    }

    /**
     * No-op stub: the original field-copy code was commented out, so this
     * touches nothing and always returns 1. Presumably messages are not meant
     * to be edited after posting — confirm before relying on the return value
     * as a success indicator.
     */
    @Override
    public Integer update(Message model) {
        return 1;
    }

    /** Delete the record with the given primary key. Always returns 1. */
    @Override
    public Integer delete(Long messageId) {
        Message existMessage = entityManager.find(Message.class, messageId);
        entityManager.remove(existMessage);
        return 1;
    }

    // ******************************************************************************
    // ********************************** CRUD END **********************************
    // ******************************************************************************
}<file_sep>package com.ysotek.support.domain.enumeration;
/**
 * Work-order label categories (GitHub-issue style).
 * <p>
 * Created by XLP on 2016/3/29.
 */
public enum WorkOrderStyle {

    bug("bug"),                 // defect report
    enhancement("enhancement"), // improvement request
    feature("feature"),         // new functionality
    duplicate("duplicate"),     // duplicate of another order
    invalid("invalid"),         // not a real issue
    question("question"),       // support question
    wontfix("wontfix");         // acknowledged but will not be fixed

    private final String name;

    WorkOrderStyle(String name) {
        this.name = name;
    }

    /** Display/label name of this style (identical to the constant's name). */
    public String getName() {
        return name;
    }
}
<file_sep>package com.vt1314.base.security.action.dashboard;
import com.vt1314.base.security.action.converter.AdminUserConverter;
import com.vt1314.base.security.business.biz.ManagerBiz;
import com.vt1314.base.security.domain.Manager;
import com.vt1314.base.sugar.tools.StringConverters;
import com.ysotek.support.business.biz.UserBiz;
import com.ysotek.support.domain.User;
import com.ysotek.support.domain.enumeration.Position;
import net.sf.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.servlet.ModelAndView;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * MVC controller administrating manager-position {@link User} accounts
 * (list/view/edit/delete pages under /web/manager).
 * <p>
 * Author: val.jzp
 */
@Controller
@RequestMapping("/web/manager")
@SuppressWarnings("unused")
public class AdminUserAction {

    private final static Logger logger = LoggerFactory.getLogger(AdminUserAction.class);
    // Fallback page size when the client sends no/invalid paging parameters.
    private final static Integer DEFAULT_PAGE_LIST_NUM = 20;

    // NOTE(review): not referenced anywhere in this class — possibly a leftover
    // from a migration to UserBiz; confirm before removing.
    @Autowired
    private ManagerBiz managerBiz;

    @Autowired
    private UserBiz userBiz;

    // ******************************************************************************
    // ********************************* CRUD START *********************************
    // ******************************************************************************

    /**
     * Open the manager list page.
     */
    @RequestMapping("/getManagerListPage")
    public ModelAndView getAdminUserListPage() {
        ModelAndView modelAndView = new ModelAndView();
        modelAndView.setViewName("pages/manager/adminUserList");
        return modelAndView;
    }

    /**
     * Paged manager list as JSON for the grid widget.
     * Fixed: the filter map was declared with raw types; it is now a properly
     * parameterised Map&lt;String, String&gt; matching UserBiz's signature.
     */
    @RequestMapping("/getManagerListJSON")
    @ResponseBody
    public JSONObject getAdminUserListJSON(@RequestParam(value = "page", required = false) String pageNowParam,
                                           @RequestParam(value = "rows", required = false) String pageSizeParam) {
        Integer pageNow = StringConverters.ToInteger(pageNowParam);
        Integer pageSize = StringConverters.ToInteger(pageSizeParam);
        if (pageNow == null || pageSize == null) {
            pageNow = 1;
            pageSize = DEFAULT_PAGE_LIST_NUM;
        }
        // Restrict the query to manager-position users.
        Map<String, String> map = new HashMap<>();
        map.put("position", Position.manager.getName());
        List<User> userList = userBiz.findList(pageNow, pageSize, map);
        Long totalNum = userBiz.totalRecord(map);
        return AdminUserConverter.getJson(userList, totalNum);
    }

    /**
     * Open the read-only detail fragment for one manager.
     */
    @RequestMapping("/getManagerViewPage")
    public ModelAndView getAdminUserViewPage(@RequestParam(value = "managerId", required = false) String adminUserIdParam) {
        Long adminUserId = StringConverters.ToLong(adminUserIdParam);
        User user = null;
        if (adminUserId != null) {
            user = userBiz.findModel(adminUserId);
        }
        ModelAndView modelAndView = new ModelAndView();
        modelAndView.setViewName("pages/manager/adminUserViewPart");
        modelAndView.addObject("manager", user);
        return modelAndView;
    }

    /**
     * Open the edit fragment; without a managerId this serves the blank
     * "create" form (the "manager" model attribute stays null).
     */
    @RequestMapping("/getManagerEditPage")
    public ModelAndView getAdminUserEditPage(@RequestParam(value = "managerId", required = false) String adminUserIdParam) {
        Long adminUserId = StringConverters.ToLong(adminUserIdParam);
        User user = null;
        if (adminUserId != null) {
            user = userBiz.findModel(adminUserId);
        }
        ModelAndView modelAndView = new ModelAndView();
        modelAndView.setViewName("pages/manager/adminUserEditPart");
        modelAndView.addObject("manager", user);
        return modelAndView;
    }

    /**
     * Persist a submitted create/update request; the position is always forced
     * to manager. Returns "1" on success.
     */
    @RequestMapping("/executeManagerEdit")
    @ResponseBody
    public String executeManagerEdit(User user) {
        user.setPosition(Position.manager);
        userBiz.addOrUpdate(user);
        return "1";
    }

    /**
     * Delete a manager account. Returns "1" on success.
     * NOTE(review): isFakeDelete is accepted but ignored — the delete is always
     * delegated to userBiz.delete; confirm whether logical (soft) deletion was
     * intended, as the endpoint name suggests.
     */
    @RequestMapping("/logicRemoveManager")
    @ResponseBody
    public String logicRemoveAdminUser(@RequestParam(value = "adminUserId", required = false) String adminUserIdParam,
                                       @RequestParam(value = "isFakeDelete", required = false) String isFakeDelete) {
        Long adminUserId = StringConverters.ToLong(adminUserIdParam);
        userBiz.delete(adminUserId);
        return "1";
    }

    // ******************************************************************************
    // ********************************** CRUD END **********************************
    // ******************************************************************************
}<file_sep>package com.ysotek.support.domain.enumeration;
/**
* Created by zavier.zhang on 10/23/2015.
*/
public enum WorkOrderState {
NEW_ORDER {
public String getName() {
return "新工单";
}
},
ORDER_ACCEPT {
public String getName() {
return "已接受";
}
},
ORDER_HANDLE {
public String getName() {
return "处理中";
}
},
ORDER_FEEDBACK {
public String getName() {
return "已反馈";
}
},
ORDER_CLOSE {
public String getName() {
return "已关闭";
}
};
public abstract String getName();
}
<file_sep>package com.ysotek.support.business.biz;
import com.ysotek.support.domain.Project;
import java.util.List;
import java.util.Map;
/**
* Created by zavier.zhang on 10/28/2015.
*/
@SuppressWarnings("unused")
public interface ProjectBiz {
// ******************************************************************************
// ********************************* CRUD START *********************************
// ******************************************************************************
/**
* 获取总记录数
*/
public Long totalRecord();
/**
* 获取总记录数
*/
public Long totalRecord(Map<String, String> queryHash);
/**
* 列表不分页
*/
public List<Project> findList();
/**
* 列表不分页
*/
public List<Project> findList(Map<String, String> queryHash);
/**
* 分页列表
*/
public List<Project> findList(Integer pageNow, Integer pageSize);
/**
* 分页列表
*/
public List<Project> findList(Integer pageNow, Integer pageSize, Map<String, String> queryHash);
/**
* 分页列表
*/
public List<Project> findList(Integer pageNow, Integer pageSize, String sqlOrder, Map<String, String> queryHash);
/**
* id获取记录
*/
public Project findModel(Long projectId);
/**
* 增加或修改记录
*/
public void addOrUpdate(Project model);
/**
* 删除记录
*/
public void delete(Long projectId);
// ******************************************************************************
// ********************************** CRUD END **********************************
// ******************************************************************************
}<file_sep>package com.ysotek.support.business.dao.impl;
import com.vt1314.base.sugar.data.QueryUtils;
import com.ysotek.support.business.dao.MessageAttachmentDao;
import com.ysotek.support.domain.Message;
import com.ysotek.support.domain.MessageAttachment;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.StringUtils;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.Query;
import javax.persistence.TypedQuery;
import java.beans.Transient;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Created by zavier.zhang on 10/28/2015.
*/
@Repository("messageAttachmentDao")
public class MessageAttachmentDaoImpl implements MessageAttachmentDao {
private final static Logger logger = LoggerFactory.getLogger(MessageAttachmentDaoImpl.class);
@PersistenceContext
private EntityManager entityManager;
// ******************************************************************************
// ********************************* CRUD START *********************************
// ******************************************************************************
public Map<String, Object> getSearchCondition(Map<String, String> queryHash) {
Map<String, Object> conditionHash = new HashMap<>();
if (queryHash == null) {
return conditionHash;
}
// Long workOrderId=Long.parseLong(queryHash.get("workOrderId"));
// if (!StringUtils.isEmpty(workOrderId)) {
// conditionHash.put("belongWorkOrder.workOrderId = ?{paramIndex} ", workOrderId);
// }
// String loginName = queryHash.get("loginName");
// if (!StringUtils.isEmpty(loginName)) {
// conditionHash.put("loginName = ?{paramIndex} ", loginName);
// }
//
// String loginPassword = queryHash.get("loginPassword");
// if (!StringUtils.isEmpty(loginPassword)) {
// conditionHash.put("loginPassword = ?{paramIndex} ", loginPassword);
// }
/*String String = queryHash.get("String");
if (!StringUtils.isEmpty(String)) {
conditionHash.put("String like ?{paramIndex} ", "%" + String + "%");
}
Integer Integer = TypeConvertUtils.StringToInteger(queryHash.get("Integer"));
if (Integer != null && Integer > -1) {
conditionHash.put("Integer = ?{paramIndex} ", Integer);
}
Date Date = TypeConvertUtils.StringToDate(queryHash.get("Date"));
if (Date != null) {
conditionHash.put("Date >= ?{paramIndex} ", Date);
}*/
return conditionHash;
}
@Override
@Transactional
public void deleteByMessageId(Long messageId) {
Query query = entityManager.createQuery("delete from MessageAttachment ma where ma.belongMessage.messageId= :messageId").setParameter("messageId", messageId);
query.executeUpdate();
}
@Override
public Long totalRecord(Map<String, String> queryHash) {
Map<String, Object> conditions = getSearchCondition(queryHash);
TypedQuery<Long> typedQuery = QueryUtils.getTypedQueryByCondition("select count(m) from MessageAttachment m ", conditions, "", entityManager, Long.class);
return typedQuery.getSingleResult();
}
@Override
public List<MessageAttachment> findList(Integer pageNow, Integer pageSize, String sqlOrder, Map<String, String> queryHash) {
if (StringUtils.isEmpty(sqlOrder)) {
sqlOrder = "order by m.messageAttachmentId desc ";
}
Map<String, Object> conditions = getSearchCondition(queryHash);
TypedQuery<MessageAttachment> typedQuery = QueryUtils.getTypedQueryByCondition("select m from MessageAttachment m ", conditions, sqlOrder, entityManager, MessageAttachment.class);
// 判断是否需要分页,并提交分页方法
if (pageSize > 0 && pageNow > 0) {
logger.debug("提交了分页查询信息,pageNow为[" + pageNow + "],pageSize为[" + pageSize + "]");
int minLimit = pageSize * (pageNow - 1);
int maxLimit = pageSize;
typedQuery.setFirstResult(minLimit).setMaxResults(maxLimit);
}
// 返回查询结果
return typedQuery.getResultList();
}
@Override
public MessageAttachment findModel(Long messageId) {
return entityManager.find(MessageAttachment.class, messageId);
}
@Override
@Transient
public Integer add(MessageAttachment model) {
model.setCreateDate(new Date());
entityManager.persist(model);
logger.info("MessageAttachmentDaoImpl添加MessageAttachment成功!");
return 1;
}
@Override
public Integer update(MessageAttachment model) {
// Message existentMessage = entityManager.find(Message.class, model.getMessageId());
// existentMessage.setProjectName(model.getProjectName());
// existentMessage.setProjectDescription(model.getProjectDescription());
// existentMessage.setUpdateDate(new Date());
return 1;
}
@Override
public Integer delete(Long projectId) {
MessageAttachment existMessage = entityManager.find(MessageAttachment.class, projectId);
entityManager.remove(existMessage);
return 1;
}
// ******************************************************************************
// ********************************** CRUD END **********************************
// ******************************************************************************
}<file_sep>package com.ysotek.support.domain.enumeration;
/**
* Created by XLP on 2016/3/8.
* 优先级
*/
public enum Priority {
//一般
General{
public String getName(){
return "一般";
}
},
//紧急
Urgent{
public String getName(){
return "紧急";
}
},
//非常紧急
SuperUrgent{
public String getName(){
return "非常紧急";
}
};
public abstract String getName();
}
<file_sep>package com.ysotek.support.business.biz;
import com.ysotek.support.domain.WorkOrder;
import com.ysotek.support.domain.enumeration.WorkOrderState;
import java.util.List;
import java.util.Map;
/**
* Created by zavier.zhang on 11/02/2015.
*/
@SuppressWarnings("unused")
public interface WorkOrderBiz {
public List<WorkOrder> findByProjectId(Long projectId);
// ******************************************************************************
// ********************************* CRUD START *********************************
// ******************************************************************************
/**
* 获取总记录数
*/
public Long totalRecord();
/**
* 获取总记录数
*/
public Long totalRecord(Map<String, String> queryHash);
/**
* 列表不分页
*/
public List<WorkOrder> findList();
/**
* 列表不分页
*/
public List<WorkOrder> findList(Map<String, String> queryHash);
/**
* 分页列表
*/
public List<WorkOrder> findList(Integer pageNow, Integer pageSize);
/**
* 分页列表
*/
public List<WorkOrder> findList(Integer pageNow, Integer pageSize, Map<String, String> queryHash);
/**
* 分页列表
*/
public List<WorkOrder> findList(Integer pageNow, Integer pageSize, String sqlOrder, Map<String, String> queryHash);
/**
* id获取记录
*/
public WorkOrder findModel(Long workOrderId);
/**
* 增加或修改记录
*/
public void addOrUpdate(WorkOrder model);
/**
* 删除记录
*/
public void delete(Long workOrderId);
/**
* 关闭工单
*/
public void changeState(Long workOrderId,WorkOrderState state);
// ******************************************************************************
// ********************************** CRUD END **********************************
// ******************************************************************************
}<file_sep>package com.ysotek.support.domain.enumeration;
/**
* 交流类型
* Created by zavier.zhang on 10/23/2015.
*/
public enum MessageType {
//客户
TYPE_CUSTOMER {
public String getName() {
return "客服";
}
},
//客服
TYPE_SERVICER {
public String getName() {
return "客服";
}
};
public abstract String getName();
}
<file_sep>package com.ysotek.support.domain.enumeration;
/**
* 工单问题分类
* Created by zavier.zhang on 10/23/2015.
*/
public enum WorkOrderType {
//关系型数据库
Type_DB {
public String getName() {
return "关系型数据库";
}
},
//对象存储
TYPE_STORAGE{
public String getName() {
return "对象存储";
}
};
public abstract String getName();
}
<file_sep>package com.ysotek.support.domain.enumeration;
/**
* Created by XLP on 2016/3/31.
*/
public enum Position {
//客服
manager{
public String getName(){
return "客服";
}
},
//客户
customerUser{
public String getName(){
return "客户";
}
};
public abstract String getName();
}
<file_sep>package com.ysotek.support.business.biz;
import com.ysotek.support.domain.Message;
import com.ysotek.support.domain.WorkOrder;
import java.util.List;
import java.util.Map;
/**
* Created by zavier.zhang on 11/03/2015.
*/
@SuppressWarnings("unused")
public interface MessageBiz {
/**
* 根据workOrder删除
*/
public void deleteByWorkOrder(WorkOrder workOrder);
public List<Long> getMessageIdByworkOrderId(WorkOrder workOrder);
// ******************************************************************************
// ********************************* CRUD START *********************************
// ******************************************************************************
/**
* 获取总记录数
*/
public Long totalRecord();
/**
* 获取总记录数
*/
public Long totalRecord(Map<String, String> queryHash);
/**
* 列表不分页
*/
public List<Message> findList();
/**
* 列表不分页
*/
public List<Message> findList(Map<String, String> queryHash);
/**
* 分页列表
*/
public List<Message> findList(Integer pageNow, Integer pageSize);
/**
* 分页列表
*/
public List<Message> findList(Integer pageNow, Integer pageSize, Map<String, String> queryHash);
/**
* 分页列表
*/
public List<Message> findList(Integer pageNow, Integer pageSize, String sqlOrder, Map<String, String> queryHash);
/**
* id获取记录
*/
public Message findModel(Long projectId);
/**
* 增加或修改记录
*/
public void addOrUpdate(Message model);
/**
* 删除记录
*/
public void delete(Long projectId);
// ******************************************************************************
// ********************************** CRUD END **********************************
// ******************************************************************************
}<file_sep>package com.ysotek.support.business.biz.impl;
import com.ysotek.support.business.biz.PriorityBiz;
import com.ysotek.support.business.dao.PriorityDao;
import com.ysotek.support.domain.Priority;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.List;
import java.util.Map;
/**
* Created by XLP on 2016/3/9.
*/
@Service("priorityBiz")
public class PriorityBizImpl implements PriorityBiz{
private Logger logger = LoggerFactory.getLogger(ProjectBizImpl.class);
@Autowired
PriorityDao priorityDao;
// ******************************************************************************
// ********************************* CRUD START *********************************
// ******************************************************************************
@Override
public Long totalRecord() {
return this.totalRecord(null);
}
@Override
public Long totalRecord(Map<String, String> queryHash) {
return priorityDao.totalRecord(queryHash);
}
@Override
public List<Priority> findList() {
return this.findList(0, 0, null);
}
@Override
public List<Priority> findList(Map<String, String> queryHash) {
return this.findList(0, 0, queryHash);
}
@Override
public List<Priority> findList(Integer pageNow, Integer pageSize) {
return this.findList(pageNow, pageSize, null);
}
@Override
public List<Priority> findList(Integer pageNow, Integer pageSize, Map<String, String> queryHash) {
return this.findList(pageNow, pageSize, "", queryHash);
}
@Override
public List<Priority> findList(Integer pageNow, Integer pageSize, String sqlOrder, Map<String, String> queryHash) {
return priorityDao.findList(pageNow, pageSize, sqlOrder, queryHash);
}
@Override
public Priority findModel(Long priorityId) {
return priorityDao.findModel(priorityId);
}
@Override
@Transactional
public void addOrUpdate(Priority model) {
if (model.getPriorityId() != null && model.getPriorityId() > 0) {
priorityDao.update(model);
} else {
priorityDao.add(model);
}
}
@Override
@Transactional
public void delete(Long priorityId) {
priorityDao.delete(priorityId);
}
@Override
public Priority findModelByPriorityName(String priorityName) {
return priorityDao.findModelByPriorityName(priorityName);
}
}
<file_sep>package com.ysotek.support.action.converter;
import com.vt1314.base.sugar.tools.CommonSugar;
import com.vt1314.base.sugar.tools.DateTimeUtils;
import com.ysotek.support.domain.Project;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import java.util.List;
/**
* Created by zavier.zhang on 10/28/2015.
*/
public class ProjectConverter {
public static JSONObject getJson(List<Project> projectList, Long totalNum) {
JSONObject result = new JSONObject();
JSONArray jsonArray = new JSONArray();
for (Project project : projectList) {
JSONObject jsonObject = new JSONObject();
jsonObject.put("projectId", CommonSugar.getTypedDefault(project.getProjectId(), 0L));
jsonObject.put("projectName", CommonSugar.getTypedDefault(project.getProjectName(), ""));
jsonObject.put("projectDescription", CommonSugar.getTypedDefault(project.getProjectDescription(), ""));
jsonObject.put("createDate", CommonSugar.getTypedDefault(DateTimeUtils.formatDateToString(project.getCreateDate()), ""));
jsonObject.put("updateDate", CommonSugar.getTypedDefault(DateTimeUtils.formatDateToString(project.getUpdateDate()), ""));
jsonArray.add(jsonObject);
}
result.put("total", totalNum);
result.put("rows", jsonArray);
return result;
}
} | e44640a20d47b7e8d216414274d8721c5b02fc6d | [
"Java"
] | 19 | Java | Jakye/Base_Project | 36065612c2b7ad0f8689e86c9967857cc7644598 | 8af456433cd3bea48a7fa57167a926a5ce46121f |
refs/heads/master | <repo_name>embedix/stag-web<file_sep>/export/vozy/vozy___.ini
[default]
_verze=122
typ=hnaci|vuz
popis=Popis vozu
vykon=123 (kW)
hmotnost=12 (t)
delka=23 (m)
osob=50
typ_nakladu=sypke hmoty
naklad=20 (t)
max_rych=100 (kmh)
max_rych_hnaci=80 (kmh)
elektro=0 (potreba el. proudu: 0-nic,
[110]
typ=hnaci
podtrida=El. stejnosměrné
popis=Posunovací lokomotiva
vyrobce=Škoda Plzeň, 1971
vykon=800
sila=160
hmotnost=72
delka=14,40
max_rych=80
max_rych_hnaci=80
elektro=1
[111]
typ=hnaci
podtrida=El. stejnosměrné
popis=Posunovací lokomotiva
vyrobce=Škoda Plzeň (1981)
vykon=760
sila=186
hmotnost=70
delka=14,40
max_rych=80
max_rych_hnaci=80
elektro=1
[112]
typ=hnaci
podtrida=El. stejnosměrné
popis=Posunovací lokomotiva
vyrobce=Škoda Plzeň (1971,reko
vykon=760
sila=186
hmotnost=68
delka=14,40
max_rych=80
max_rych_hnaci=80
elektro=1
[114]
typ=hnaci
podtrida=El. stejnosměrné
popis=Univerzální lokomotiva
vyrobce=Škoda Plzeň (1990)
vykon=1600
sila=180
hmotnost=72
delka=14.56
max_rych=100
max_rych_hnaci=100
elektro=1
[121]
typ=hnaci
podtrida=El. stejnosměrné
popis=Univerzální lokomotiva
vyrobce=Škoda Plzeň (1960)
vykon=2032
sila=234
hmotnost=88
delka=16,14
max_rych=90
max_rych_hnaci=90
elektro=1
[122]
typ=hnaci
podtrida=El. stejnosměrné
popis=Nákladní lokomotiva
vyrobce=Škoda Plzeň (1967)
vykon=2040
sila=224
hmotnost=85
delka=17,21
max_rych=90
max_rych_hnaci=90
elektro=1
[123]
typ=hnaci
podtrida=El. stejnosměrné
popis=Nákladní lokomotiva
vyrobce=Škoda Plzeň (1971)
vykon=2040
sila=224
hmotnost=85
delka=17,21
max_rych=90
max_rych_hnaci=90
elektro=1
[124]
typ=hnaci
podtrida=El. stejnosměrné
popis=Zkušební lokomotiva
vyrobce=Škoda Plzeň (1971)
vykon=2472
sila=224
hmotnost=86
delka=17,21
max_rych=160
max_rych_hnaci=160
elektro=1
[130]
typ=hnaci
podtrida=El. stejnosměrné
popis=Univerzální lokomotiva
vyrobce=Škoda Plzeň (1977)
vykon=2040
sila=228
hmotnost=85
delka=17,21
max_rych=100
max_rych_hnaci=100
elektro=1
[131 CD]
typ=hnaci
podtrida=El. stejnosměrné
popis=Dvojdílná nákladní lokomotiva
vyrobce=Škoda Plzeň(1980)
vykon=4480
sila=350
hmotnost=169
delka=34,42
max_rych=100
max_rych_hnaci=100
elektro=1
[131 ZSR]
typ=hnaci
podtrida=El. stejnosměrné
popis=Dvojdílná nákladní lokomotiva
vyrobce=Škoda Plzeň(1980)
vykon=4480
sila=350
hmotnost=169
delka=34,42
max_rych=100
max_rych_hnaci=100
elektro=1
[140]
typ=hnaci
podtrida=El. stejnosměrné
popis=Univerzální lokomotiva
vyrobce=Škoda Plzeň (1953)
vykon=2032
sila=212
hmotnost=82
delka=15,80
max_rych=120
max_rych_hnaci=120
elektro=1
[141]
typ=hnaci
podtrida=El. stejnosměrné
popis=Univerzální lokomotiva
vyrobce=Škoda Plzeň (1957)
vykon=2032
sila=225
hmotnost=84
delka=16,14
max_rych=120
max_rych_hnaci=120
elektro=1
[150]
typ=hnaci
podtrida=El. stejnosměrné
popis=Rychlíková lokomotiva
vyrobce=Škoda Plzeň (1978)
vykon=4000
sila=227
hmotnost=82
delka=16,74
max_rych=140
max_rych_hnaci=140
elektro=1
[151]
typ=hnaci
podtrida=El. stejnosměrné
popis=Rychlíková lokomotiva
vyrobce=Škoda Plzeň (1978, reko
vykon=4000
sila=227
hmotnost=82
delka=16,74
max_rych=160
max_rych_hnaci=160
elektro=1
[162 ZSR]
typ=hnaci
podtrida=El. stejnosměrné
popis=Univerzální lokomotiva
vyrobce=Škoda Plzeň (1984)
vykon=3378
sila=258
hmotnost=85
delka=16,80
max_rych=140
max_rych_hnaci=140
elektro=1
[163 CD]
typ=hnaci
podtrida=El. stejnosměrné
popis=Univerzální lokomotiva
vyrobce=Škoda Plzeň (1984, reko
vykon=3480
sila=285
hmotnost=85
delka=16,80
max_rych=120
max_rych_hnaci=120
elektro=1
[169]
typ=hnaci
podtrida=El. stejnosměrné
popis=Prototypová lokomotiva
vyrobce=Škoda Plzeň (1987)
vykon=3200
sila=225
hmotnost=75
delka=18,00
max_rych=120
max_rych_hnaci=120
elektro=1
[180]
typ=hnaci
podtrida=El. stejnosměrné
popis=Nákladní lokomotiva
vyrobce=Škoda Plzeň (1958)
vykon=2610
sila=345
hmotnost=119
delka=18,80
max_rych=90
max_rych_hnaci=90
elektro=1
[181]
typ=hnaci
podtrida=El. stejnosměrné
popis=Nákladní lokomotiva
vyrobce=Škoda Plzeň (1961)
vykon=2610
sila=345
hmotnost=124
delka=18,80
max_rych=90
max_rych_hnaci=90
elektro=1
[182]
typ=hnaci
podtrida=El. stejnosměrné
popis=Nákladní lokomotiva
vyrobce=Škoda Plzeň (1963)
vykon=2790
sila=345
hmotnost=120
delka=18,80
max_rych=90
max_rych_hnaci=90
elektro=1
[183]
typ=hnaci
podtrida=El. stejnosměrné
popis=Nákladní lokomotiva
vyrobce=Škoda Plzeň (1971)
vykon=2790
sila=345
hmotnost=120
delka=18,94
max_rych=90
max_rych_hnaci=90
elektro=1
[184]
typ=hnaci
podtrida=El. stejnosměrné
popis=Univerzální lokomotiva
vyrobce=Škoda Plzeň (1994)
vykon=5220
sila=575
hmotnost=120
delka=30,35
max_rych=95
max_rych_hnaci=95
elektro=1
[210]
typ=hnaci
podtrida=El. střídavé
popis=Posunovací lokomotiva
vyrobce=Škoda Plzeň (1972)
vykon=880
sila=164
hmotnost=72
delka=14,40
max_rych=80
max_rych_hnaci=80
elektro=2
[230]
typ=hnaci
podtrida=El. střídavé
popis=Univerzální lokomotiva
vyrobce=Škoda Plzeň (1966)
vykon=3080
sila=320
hmotnost=85
delka=16,44
max_rych=110
max_rych_hnaci=110
elektro=2
[240]
typ=hnaci
podtrida=El. střídavé
popis=Univerzální lokomotiva
vyrobce=Škoda Plzeň (1969)
vykon=3080
sila=255
hmotnost=85
delka=16,44
max_rych=140
max_rych_hnaci=140
elektro=2
[242]
typ=hnaci
podtrida=El. střídavé
popis=Univerzální lokomotiva
vyrobce=Škoda Plzeň (1975)
vykon=3080
sila=240
hmotnost=84
delka=16,44
max_rych=120
max_rych_hnaci=120
elektro=2
[263 CD]
typ=hnaci
podtrida=El. střídavé
popis=Univerzální lokomotiva
vyrobce=Škoda Plzeň (1985)
vykon=3060
sila=250
hmotnost=84
delka=16,80
max_rych=120
max_rych_hnaci=120
elektro=2
[263 ZSR]
typ=hnaci
podtrida=El. střídavé
popis=Univerzální lokomotiva
vyrobce=Škoda Plzeň (1985)
vykon=3060
sila=250
hmotnost=84
delka=16,80
max_rych=120
max_rych_hnaci=120
elektro=2
[280]
typ=hnaci
podtrida=El. střídavé
popis=Prototypová lokomotiva
vyrobce=Škoda Plzeň (1963)
vykon=4920
sila=310
hmotnost=126
delka=20,00
max_rych=120
max_rych_hnaci=120
elektro=2
[340]
typ=hnaci
podtrida=El. dvojsystemové
popis=Univerzální lokomotiva
vyrobce=Škoda Plzeň (1966, reko
vykon=2050
sila=320
hmotnost=85
delka=16,44
max_rych=110
max_rych_hnaci=110
elektro=17
[350 ZSR]
typ=hnaci
podtrida=El. dvojsystemové
popis=Rychlíková lokomotiva
vyrobce=Škoda Plzeň (1973)
vykon=4000
sila=210
hmotnost=90
delka=17,24
max_rych=160
max_rych_hnaci=160
elektro=3
[362]
typ=hnaci
podtrida=El. dvojsystemové
popis=Univerzální lokomotiva
vyrobce=Škoda Plzeň (1980)
vykon=3060
sila=260
hmotnost=87
delka=16,80
max_rych=140
max_rych_hnaci=140
elektro=3
[363 CD]
typ=hnaci
podtrida=El. dvojsystemové
popis=Univerzální lokomotiva
vyrobce=Škoda Plzeň (1980)
vykon=3060
sila=260
hmotnost=87
delka=16,80
max_rych=120
max_rych_hnaci=120
elektro=3
[363 ZSR]
typ=hnaci
podtrida=El. dvojsystemové
popis=Univerzální lokomotiva
vyrobce=Škoda Plzeň (1980)
vykon=3060
sila=260
hmotnost=87
delka=16,80
max_rych=120
max_rych_hnaci=120
elektro=3
[371]
typ=hnaci
podtrida=El. dvojsystemové
popis=Univerzální lokomotiva
vyrobce=Škoda Plzeň (1981, reko
vykon=3080
sila=243
hmotnost=84
delka=16,80
max_rych=160
max_rych_hnaci=160
elektro=17
[372]
typ=hnaci
podtrida=El. dvojsystemové
popis=Univerzální lokomotiva
vyrobce=Škoda Plzeň (1981)
vykon=3080
sila=243
hmotnost=84
delka=16,80
max_rych=120
max_rych_hnaci=120
elektro=17
[180 DR]
typ=hnaci
podtrida=El. dvojsystemové
popis=Univerzální lokomotiva
vyrobce=Škoda Plzeň (1981)
vykon=3080
sila=243
hmotnost=84
delka=16,80
max_rych=120
max_rych_hnaci=120
elektro=17
[181 DR]
typ=hnaci
podtrida=El. dvojsystemové
popis=Univerzální lokomotiva
vyrobce=Škoda Plzeň (1981, reko
vykon=3080
sila=243
hmotnost=84
delka=16,80
max_rych=160
max_rych_hnaci=160
elektro=17
[451.1]
typ=hnaci
podtrida=El. jednotky stejnosměrné
popis=El. osob. jednotka
vyrobce=Vagonka Tatra Studénka
vykon=1320
sila=78
hmotnost=182
delka=97,59
max_rych=100
max_rych_hnaci=100
elektro=1
cel.osob=680
[451.2]
typ=hnaci
podtrida=El. jednotky stejnosměrné
popis=El. osob. jednotka
vyrobce=Vagonka Tatra Studénka
vykon=1320
sila=78
hmotnost=216
delka=120,94
max_rych=100
max_rych_hnaci=100
elektro=1
cel.osob=874
[452.1]
typ=hnaci
podtrida=El. jednotky stejnosměrné
popis=El. osob. jednotka
vyrobce=Vagonka Tatra Studénka
vykon=1320
sila=78
hmotnost=182
delka=97,59
max_rych=100
max_rych_hnaci=100
elektro=1
cel.osob=680
[452.2]
typ=hnaci
podtrida=El. jednotky stejnosměrné
popis=El. osob. jednotka
vyrobce=Vagonka Tatra Studénka
vykon=1320
sila=78
hmotnost=216
delka=120,94
max_rych=100
max_rych_hnaci=100
elektro=1
cel.osob=874
[460 m]
typ=hnaci
podtrida=El. jednotky stejnosměrné
popis=El. osob. jednotka
vyrobce=Vagonka Tatra Studénka
vykon=2000
sila=117
hmotnost=239
delka=122,50
max_rych=110
max_rych_hnaci=110
elektro=1
cel.osob=858
[460 z]
typ=hnaci
podtrida=El. jednotky stejnosměrné
popis=El. osob. jednotka
vyrobce=Vagonka Tatra Studénka
vykon=2000
sila=117
hmotnost=239
delka=122,50
max_rych=110
max_rych_hnaci=110
elektro=1
cel.osob=858
[470 s]
typ=hnaci
podtrida=El. jednotky stejnosměrné
popis=El. osob. jednotka
vyrobce=Moravskoslezská Vagonka
vykon=1500
sila=143
hmotnost=266
delka=132
max_rych=120
max_rych_hnaci=120
elektro=1
cel.osob=1354
[470 z]
typ=hnaci
podtrida=El. jednotky stejnosměrné
popis=El. osob. jednotka
vyrobce=Moravskoslezská Vagonka
vykon=1500
sila=143
hmotnost=266
delka=132
max_rych=120
max_rych_hnaci=120
elektro=1
cel.osob=1354
[471]
typ=hnaci
podtrida=El. jednotky stejnosměrné
popis=El. osob. jednotka
vyrobce=ČKD vagonka Ostrava
vykon=2000
sila=98
hmotnost=156
delka=79,20
max_rych=140
max_rych_hnaci=140
elektro=1
cel.osob=643
[560 m]
typ=hnaci
podtrida=El. jednotky střídavé
popis=El. osob. jednotka
vyrobce=Vagonka Tatra Studénka
vykon=1680
sila=178
hmotnost=247
delka=122,50
max_rych=110
max_rych_hnaci=110
elektro=2
cel.osob=672
[560 z]
typ=hnaci
podtrida=El. jednotky střídavé
popis=El. osob. jednotka
vyrobce=Vagonka Tatra Studénka
vykon=1680
sila=178
hmotnost=247
delka=122,50
max_rych=110
max_rych_hnaci=110
elektro=2
cel.osob=672
[680]
typ=hnaci
podtrida=El. jednotky vícesystémové
popis=El. osob. jednotka
vyrobce=ALSTOM Italie (2003)
vykon=3920
sila=200
hmotnost=378
delka=184,80
max_rych=230
max_rych_hnaci=230
elektro=19
cel.osob=331
[700]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselmechanická lokomotiva
vyrobce=ČKD Praha (1955)
vykon=121
sila=53
hmotnost=22
delka=7,24
max_rych=40
max_rych_hnaci=40
[701]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselmechanická lokomotiva
vyrobce=ČKD Praha (1957)
vykon=147
sila=58
hmotnost=22
delka=7,24
max_rych=40
max_rych_hnaci=40
[704]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselelektrická lokomotiva
vyrobce=ČKD Praha (1988)
vykon=220
sila=103
hmotnost=29
delka=7,50
max_rych=60
max_rych_hnaci=60
[708]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselelektrická lokomotiva
vyrobce=ČKD Praha (1997)
vykon=470
sila=110
hmotnost=37
delka=9,45
max_rych=80
max_rych_hnaci=80
[710]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselhydromechanická lokomotiva
vyrobce=ČKD Praha (1961)
vykon=301
sila=57
hmotnost=41
delka=9,44
max_rych=60
max_rych_hnaci=60
[714]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselelektrická lokomotiva
vyrobce=ČKD Praha(1961, reko
vykon=600
sila=104
hmotnost=64
delka=14,24
max_rych=90
max_rych_hnaci=90
[720 c]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselelektrická lokomotiva
vyrobce=ČKD Praha (1959)
vykon=552
sila=160
hmotnost=61
delka=12,44
max_rych=60
max_rych_hnaci=60
[720 m]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselelektrická lokomotiva
vyrobce=ČKD Praha (1959)
vykon=552
sila=160
hmotnost=61
delka=12,44
max_rych=60
max_rych_hnaci=60
[721]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselelektrická lokomotiva
vyrobce=ČKD Praha (1962)
vykon=552
sila=186
hmotnost=74
delka=13,26
max_rych=80
max_rych_hnaci=80
[730]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselelektrická lokomotiva
vyrobce=ČKD Praha (1989)
vykon=600
sila=205
hmotnost=70
delka=13,98
max_rych=80
max_rych_hnaci=80
[731]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselelektrická lokomotiva
vyrobce=ČKD Praha (1990)
vykon=600
sila=205
hmotnost=69
delka=15,26
max_rych=80
max_rych_hnaci=80
[735 c]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselelektrická lokomotiva
vyrobce=TS Martin (1973)
vykon=927
sila=192
hmotnost=64
delka=14,18
max_rych=90
max_rych_hnaci=90
[735 m]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselelektrická lokomotiva
vyrobce=TS Martin (1973)
vykon=927
sila=192
hmotnost=64
delka=14,18
max_rych=90
max_rych_hnaci=90
[742]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselelektrická lokomotiva
vyrobce=ČKD Praha (1977)
vykon=883
sila=192
hmotnost=64
delka=13,58
max_rych=90
max_rych_hnaci=90
[743]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselelektrická lokomotiva
vyrobce=ČKD Praha (1987)
vykon=800
sila=123
hmotnost=66
delka=13,60
max_rych=90
max_rych_hnaci=90
[751]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselelektrická lokomotiva
vyrobce=ČKD Praha (1966)
vykon=1103
sila=215
hmotnost=75
delka=16,50
max_rych=100
max_rych_hnaci=100
[752]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselelektrická lokomotiva
vyrobce=ČKD Praha (1969)
vykon=1102
sila=215
hmotnost=74
delka=16,50
max_rych=100
max_rych_hnaci=100
[753]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselelektrická lokomotiva
vyrobce=ČKD Praha (1970)
vykon=1325
sila=215
hmotnost=73
delka=16,54
max_rych=100
max_rych_hnaci=100
[754]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselelektrická lokomotiva
vyrobce=ČKD Praha (1975)
vykon=1460
sila=180
hmotnost=74
delka=16,54
max_rych=100
max_rych_hnaci=100
[759]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselelektrická lokomotiva
vyrobce=ČKD Praha (1974)
vykon=1766
sila=270
hmotnost=85
delka=17,84
max_rych=140
max_rych_hnaci=140
[770]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselelektrická lokomotiva
vyrobce=SMZ Dubnica n.Váh. (1966)
vykon=993
sila=280
hmotnost=115
delka=17,24
max_rych=90
max_rych_hnaci=90
[775]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselelektrická lokomotiva
vyrobce=ČKD Praha (1961)
vykon=1470
sila=244
hmotnost=111
delka=18
max_rych=100
max_rych_hnaci=100
[781]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselelektrická lokomotiva
vyrobce=Lokomotivka Vorošilovgrad
vykon=1470
sila=285
hmotnost=116
delka=17,55
max_rych=100
max_rych_hnaci=100
[801]
typ=hnaci
podtrida=Motorové vozy
popis=Motorový osobní vůz
vyrobce=Tatra Kopřivnice (1948)
vykon=114
sila=28
hmotnost=17
delka=12,10
max_rych=60
max_rych_hnaci=60
cel.osob=48
[810]
typ=hnaci
podtrida=Motorové vozy
popis=Motorový osobní vůz
vyrobce=Vagonka Tatra Studénka
vykon=155
sila=29
hmotnost=20
delka=13,97
max_rych=80
max_rych_hnaci=80
cel.osob=95
[811 CD]
typ=hnaci
podtrida=Motorové vozy
popis=Motorový osobní vůz
vyrobce=Vagonka Tatra Studénka
vykon=155
sila=40
hmotnost=20
delka=13,97
max_rych=80
max_rych_hnaci=80
cel.osob=73
[811 ZSR]
typ=hnaci
podtrida=Motorové vozy
popis=Motorový osobní vůz
vyrobce=Vagonka Tatra Studénka
vykon=237
sila=40
hmotnost=24
delka=13,97
max_rych=80
max_rych_hnaci=80
cel.osob=91
[812]
typ=hnaci
podtrida=Motorové vozy
popis=Motorový osobní vůz
vyrobce=Vagonka Tatra Studénka
vykon=242
sila=54
hmotnost=21
delka=14,47
max_rych=80
max_rych_hnaci=80
cel.osob=82
[820]
typ=hnaci
podtrida=Motorové vozy
popis=Motorový osobní vůz
vyrobce=Vagonka Tatra Studénka
vykon=206
sila=40
hmotnost=31
delka=18,50
max_rych=70
max_rych_hnaci=70
cel.osob=102
[830.1s]
typ=hnaci
podtrida=Motorové vozy
popis=Motorový osobní vůz
vyrobce=Královopolská strojírna
vykon=301
sila=45
hmotnost=43
delka=21,20
max_rych=90
max_rych_hnaci=90
cel.osob=80
[830.2s]
typ=hnaci
podtrida=Motorové vozy
popis=Motorový osobní vůz
vyrobce=Vagonka Tatra Studénka
vykon=301
sila=53
hmotnost=47
delka=21,24
max_rych=90
max_rych_hnaci=90
cel.osob=80
[830.3s]
typ=hnaci
podtrida=Motorové vozy
popis=Motorový osobní vůz
vyrobce=Vagonka Tatra Studénka
vykon=301
sila=53
hmotnost=47
delka=21,24
max_rych=90
max_rych_hnaci=90
cel.osob=80
[831]
typ=hnaci
podtrida=Motorové vozy
popis=Motorový osobní vůz
vyrobce=Vagonka Tatra Studénka
vykon=308
sila=53
hmotnost=47
delka=21,24
max_rych=90
max_rych_hnaci=90
cel.osob=80
[842]
typ=hnaci
podtrida=Motorové vozy
popis=Motorový osobní vůz
vyrobce=Moravskoslezská Vagonka
vykon=424
sila=60
hmotnost=46
delka=24,70
max_rych=100
max_rych_hnaci=100
cel.osob=128
[843]
typ=hnaci
podtrida=Motorové vozy
popis=Motorový osobní vůz
vyrobce=Moravskoslezská Vagonka
vykon=600
sila=100
hmotnost=56
delka=25,20
max_rych=110
max_rych_hnaci=110
cel.osob=104
[850]
typ=hnaci
podtrida=Motorové vozy
popis=Motorový osobní vůz
vyrobce=Vagonka Tatra Studénka
vykon=515
sila=77
hmotnost=50
delka=24,79
max_rych=110
max_rych_hnaci=110
cel.osob=108
[851]
typ=hnaci
podtrida=Motorové vozy
popis=Motorový osobní vůz
vyrobce=Vagonka Tatra Studénka
vykon=588
sila=78
hmotnost=51
delka=24,79
max_rych=110
max_rych_hnaci=110
cel.osob=108
[852]
typ=hnaci
podtrida=Motorové vozy
popis=Motorový osobní vůz
vyrobce=Vagonka Tatra Studénka
vykon=588
sila=78
hmotnost=51
delka=24,79
max_rych=120
max_rych_hnaci=120
cel.osob=68
[853]
typ=hnaci
podtrida=Motorové vozy
popis=Motorový osobní vůz
vyrobce=Vagonka Tatra Studénka
vykon=588
sila=78
hmotnost=51
delka=24,79
max_rych=120
max_rych_hnaci=120
cel.osob=68
[854]
typ=hnaci
podtrida=Motorové vozy
popis=Motorový osobní vůz
vyrobce=Vagonka Tatra Studénka
vykon=588
sila=80
hmotnost=52
delka=24,79
max_rych=120
max_rych_hnaci=120
cel.osob=108
[860]
typ=hnaci
podtrida=Motorové vozy
popis=Motorový osobní vůz
vyrobce=Vagonka Tatra Studénka
vykon=442
sila=103
hmotnost=56
delka=24,50
max_rych=100
max_rych_hnaci=100
cel.osob=130
[870]
typ=hnaci
podtrida=Motorové jednotky
popis=Motorová osobní jednotka
vyrobce=<NAME>
vykon=912
sila=205
hmotnost=178
delka=93,92
max_rych=130
max_rych_hnaci=130
cel.osob=324
[001]
typ=vuz
podtrida=Přípojné vozy
popis=Přípojný osobní vůz
vyrobce=Vagonka Tatra Studénka
hmotnost=12
delka=12,10
max_rych=60
max_rych_hnaci=60
cel.osob=53
[010]
typ=vuz
podtrida=Přípojné vozy
popis=Přípojný osobní vůz
vyrobce=Vagonka Tatra Studénka
hmotnost=15
delka=13,97
max_rych=80
max_rych_hnaci=80
cel.osob=104
[011 CD]
typ=vuz
podtrida=Přípojné vozy
popis=Přípojný osobní vůz
vyrobce=Vagonka Tatra Studénka
hmotnost=15
delka=13,97
max_rych=80
max_rych_hnaci=80
cel.osob=104
[011 ZSR]
typ=vuz
podtrida=Přípojné vozy
popis=Přípojný osobní vůz
vyrobce=Vagonka Tatra Studénka
hmotnost=16
delka=13,97
max_rych=80
max_rych_hnaci=80
cel.osob=107
[020]
typ=vuz
podtrida=Přípojné vozy
popis=Přípojný osobní vůz
vyrobce=Vagonka Tatra Studénka
hmotnost=23
delka=18,50
max_rych=90
max_rych_hnaci=90
cel.osob=108
[021]
typ=vuz
podtrida=Přípojné vozy
popis=Přípojný osobní vůz
vyrobce=Vagonka Tatra Studénka
hmotnost=24
delka=18,50
max_rych=90
max_rych_hnaci=90
cel.osob=108
[022]
typ=vuz
podtrida=Přípojné vozy
popis=Přípojný osobní vůz
vyrobce=Vagonka Tatra Studénka
hmotnost=24
delka=18,50
max_rych=90
max_rych_hnaci=90
cel.osob=80
[043]
typ=vuz
podtrida=Přípojné vozy
popis=Přípojný osobní vůz
vyrobce=Moravskoslezská Vagonka
hmotnost=27
delka=19,70
max_rych=120
max_rych_hnaci=120
cel.osob=132
[050]
typ=vuz
podtrida=Přípojné vozy
popis=Přípojný osobní vůz
vyrobce=Vagonka Tatra Studénka
hmotnost=32
delka=24,79
max_rych=110
max_rych_hnaci=110
cel.osob=120
[053]
typ=vuz
podtrida=Přípojné vozy
popis=Přípojný osobní vůz
vyrobce=Vagonka Tatra Studénka
hmotnost=34
delka=24,50
max_rych=120
max_rych_hnaci=120
cel.osob=128
[054]
typ=vuz
podtrida=Přípojné vozy
popis=Přípojný osobní vůz
vyrobce=Vagonka Tatra Studénka
hmotnost=34
delka=24,50
max_rych=120
max_rych_hnaci=120
cel.osob=128
[912]
typ=vuz
podtrida=Přípojné vozy
popis=Přípojný řídící osobní vůz
vyrobce=Vagonka <NAME>
hmotnost=17
delka=14,22
max_rych=80
max_rych_hnaci=80
cel.osob=110
[943]
typ=vuz
podtrida=Přípojné vozy
popis=Přípojný řídící osobní vůz
vyrobce=Moravskoslezská Vagonka
hmotnost=28
delka=29,70
max_rych=120
max_rych_hnaci=120
cel.osob=106
[ABee]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 1 a 2 třídy (1973)
hmotnost=39
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=60
[ABtee]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 1 a 2 třídy (1973)
hmotnost=39
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=60
[ABtee CSD]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 1 a 2 třídy (1973)
hmotnost=39
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=60
[Acee]
typ=vuz
podtrida=Osobní vozy
popis=Osobní, lůžkový vůz 1 třídy
hmotnost=38
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=36
[Acz]
typ=vuz
podtrida=Osobní vozy
popis=Osobní, lehátkový vůz 1 třídy
hmotnost=38
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=36
[Aee]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 1 třídy (1975)
hmotnost=39
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=54
[Aeer]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 1 třídy s klimatizací
hmotnost=39
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=48
[Amee]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 1 třídy (1985)
hmotnost=40
delka=26,40
max_rych=160
max_rych_hnaci=160
cel.osob=66
[Ampz]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 1 třídy s klimatizací
hmotnost=47
delka=26,40
max_rych=200
max_rych_hnaci=200
cel.osob=54
[Asee]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 1 třídy (1980)
hmotnost=39
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=48
[Atee]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 1 třídy (1972)
hmotnost=39
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=54
[Atee CSD]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 1 třídy (1972)
hmotnost=39
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=54
[Bai]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 2 třídy (1960)
hmotnost=39
delka=24,50
max_rych=120
max_rych_hnaci=120
cel.osob=72
[Bcee]
typ=vuz
podtrida=Osobní vozy
popis=Osobní, lůžkový vůz 2 třídy
hmotnost=40
delka=24,50
max_rych=140
max_rych_hnaci=140
[Bcz]
typ=vuz
podtrida=Osobní vozy
popis=Osobní, lehátkový vůz 2 třídy
hmotnost=40
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=36
[ADbmee]
typ=vuz
podtrida=Osobní vozy se služ.oddílem
popis=Os. se služ odd., odd. pro inval.
hmotnost=37
delka=26,40
max_rych=160
max_rych_hnaci=160
cel.osob=40
[BDbmee]
typ=vuz
podtrida=Osobní vozy se služ.oddílem
popis=Os. se služ odd., odd. pro inval.
hmotnost=37
delka=26,40
max_rych=160
max_rych_hnaci=160
cel.osob=40
[BDee]
typ=vuz
podtrida=Osobní vozy se služ.oddílem
popis=Os. se služ odd. 2 třídy (1975)
hmotnost=38
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=48
[BDeer]
typ=vuz
podtrida=Osobní vozy se služ.oddílem
popis=Os. se služ odd. 2 třídy (1980,
hmotnost=38
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=48
[BDsee]
typ=vuz
podtrida=Osobní vozy se služ.oddílem
popis=Os. se služ odd. 2 třídy (1980)
hmotnost=38
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=48
[BDtee]
typ=vuz
podtrida=Osobní vozy se služ.oddílem
popis=Os. se služ odd. 2 třídy (1972)
hmotnost=38
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=48
[BDtee CSD]
typ=vuz
podtrida=Osobní vozy se služ.oddílem
popis=Os. se služ odd. 2 třídy (1972)
hmotnost=38
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=48
[Bee]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 2 třídy (1975)
hmotnost=39
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=57
[Bsee]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 2 třídy (1980)
hmotnost=39
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=57
[Btee]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 2 třídy (1972)
hmotnost=39
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=57
[Btee CSD]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 2 třídy (1972)
hmotnost=39
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=57
[Beer]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 2 třídy s klimatizací
hmotnost=39
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=57
[Bmee]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 2 třídy (1985)
hmotnost=48
delka=26,40
max_rych=160
max_rych_hnaci=160
cel.osob=66
[Bmo]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 2 třídy,
hmotnost=44
delka=26,80
max_rych=100
max_rych_hnaci=100
cel.osob=126
[Bmpz]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 2 třídy s klimatizací
hmotnost=49
delka=26
max_rych=200
max_rych_hnaci=200
cel.osob=64
[Bymee]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 2 třídy (1989)
hmotnost=46
delka=26,40
max_rych=160
max_rych_hnaci=160
cel.osob=68
[BReer]
typ=vuz
podtrida=Osobní vozy restaurační
popis=Osobní vůz 2 třídy s klimatizací
hmotnost=25
delka=24,50
max_rych=140
max_rych_hnaci=140
[BRm]
typ=vuz
podtrida=Osobní vozy restaurační
popis=Osobní vůz 2 třídy s klimatizací
hmotnost=25
delka=24,50
max_rych=140
max_rych_hnaci=140
[BRmee]
typ=vuz
podtrida=Osobní vozy restaurační
popis=Osobní vůz 2 třídy s klimatizací
hmotnost=28
delka=26,40
max_rych=160
max_rych_hnaci=160
[BR CSD]
typ=vuz
podtrida=Osobní vozy restaurační
popis=Osobní vůz 2 třídy (1972)
hmotnost=39
delka=24,50
max_rych=140
max_rych_hnaci=140
[WRmpz]
typ=vuz
podtrida=Osobní vozy restaurační
popis=Osobní vůz 2 třídy s klimatizací
hmotnost=50
delka=26,40
osob=24
max_rych=200
max_rych_hnaci=200
[Dee]
typ=vuz
podtrida=Osobní vozy služební
popis=Osobní vůz služební,
hmotnost=36
delka=23,30
max_rych=120
max_rych_hnaci=120
[Dee CSD]
typ=vuz
podtrida=Osobní vozy služební
popis=Osobní vůz služební,
hmotnost=36
delka=23,30
max_rych=120
max_rych_hnaci=120
[Dhee]
typ=vuz
podtrida=Osobní vozy služební
popis=Osobní vůz služební,
hmotnost=40
delka=24,50
max_rych=160
max_rych_hnaci=160
[Sa CD]
typ=vuz
podtrida=Osobní vozy služební
popis=Osobní vůz salonní (1975)
hmotnost=38
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=40
[WLAB]
typ=vuz
podtrida=Osobní vozy lůžkové
popis=Osobní, lůžkový vůz 1 a 2 třídy
hmotnost=43
delka=24,50
max_rych=160
max_rych_hnaci=160
cel.osob=32
[WLABmee 1]
typ=vuz
podtrida=Osobní vozy lůžkové
popis=Osobní, lůžkový vůz 1 a 2 třídy
hmotnost=48
delka=26,40
max_rych=160
max_rych_hnaci=160
cel.osob=34
[WLABmee 2]
typ=vuz
podtrida=Osobní vozy lůžkové
popis=Osobní, lůžkový vůz 1 a 2 třídy
hmotnost=48
delka=26,40
max_rych=160
max_rych_hnaci=160
cel.osob=34
[WLA]
typ=vuz
podtrida=Osobní vozy lehátkový
popis=Osobní, lůžkový vůz 1 třídy
hmotnost=42
delka=24,90
max_rych=120
max_rych_hnaci=120
cel.osob=22
[WLB]
typ=vuz
podtrida=Osobní vozy lehátkový
popis=Osobní, lůžkový vůz 2 třídy
hmotnost=42
delka=24,90
max_rych=120
max_rych_hnaci=120
cel.osob=33
[Post 1.1]
typ=vuz
podtrida=Osobní vozy poštovní
popis=Poštovní vůz s 1 odd. (1980)
hmotnost=34
delka=23,50
max_rych=140
max_rych_hnaci=140
[Post 1.2]
typ=vuz
podtrida=Osobní vozy poštovní
popis=Poštovní vůz s 3 odd. (1980)
hmotnost=34
delka=23,50
max_rych=140
max_rych_hnaci=140
[Post 1.3]
typ=vuz
podtrida=Osobní vozy poštovní
popis=Poštovní vůz s 2 odd. (1982)
hmotnost=34
delka=23,50
max_rych=140
max_rych_hnaci=140
[Post 2.1]
typ=vuz
podtrida=Osobní vozy poštovní
popis=Poštovní vůz s 1 odd. (1980)
hmotnost=34
delka=23,50
max_rych=140
max_rych_hnaci=140
[Post 2.2]
typ=vuz
podtrida=Osobní vozy poštovní
popis=Poštovní vůz s 3 odd. (1980)
hmotnost=34
delka=23,50
max_rych=140
max_rych_hnaci=140
[Post 2.3]
typ=vuz
podtrida=Osobní vozy poštovní
popis=Poštovní vůz s 2 odd. (1982)
hmotnost=34
delka=23,50
max_rych=140
max_rych_hnaci=140
[Amee ZSR]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 1 třídy (1985)
hmotnost=40
delka=26,40
max_rych=160
max_rych_hnaci=160
cel.osob=66
[A ZSR]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 1 třídy (1972)
hmotnost=38
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=54
[BDmee ZSR]
typ=vuz
podtrida=Osobní vozy se služ.oddílem
popis=Os. se služ odd. 2 třídy (1985,
hmotnost=37
delka=26,40
max_rych=160
max_rych_hnaci=160
cel.osob=40
[BD ZSR]
typ=vuz
podtrida=Osobní vozy se služ.oddílem
popis=Os. se služ odd. 2 třídy (1975)
hmotnost=38
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=48
[Bmee ZSR]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 2 třídy (1985)
hmotnost=48
delka=26,40
max_rych=160
max_rych_hnaci=160
cel.osob=66
[B ZSR]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 2 třídy (1975)
hmotnost=39
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=57
[BRmee ZSR]
typ=vuz
podtrida=Osobní vozy restaurační
popis=Osobní vůz 2 třídy s klimatizací
hmotnost=48
delka=26
osob=48
max_rych=160
max_rych_hnaci=160
[BRm ZSR]
typ=vuz
podtrida=Osobní vozy restaurační
popis=Osobní vůz 2 třídy (1975, reko
hmotnost=25
delka=24,50
max_rych=140
max_rych_hnaci=140
[A DB]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 1 třídy s klimatizací
hmotnost=38
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=54
[B DB]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 2 třídy s klimatizací
hmotnost=39
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=57
[BD DB]
typ=vuz
podtrida=Osobní vozy se služ.oddílem
popis=Os. se služ odd. 2 třídy s
hmotnost=38
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=48
[BRm DB]
typ=vuz
podtrida=Osobní vozy restaurační
popis=Osobní vůz 2 třídy s klimatizací
hmotnost=25
delka=24,50
max_rych=140
max_rych_hnaci=140
[A MAV]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 1 třídy (1972)
hmotnost=38
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=54
[B MAV]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 2 třídy (1982)
hmotnost=39
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=57
[BD MAV]
typ=vuz
podtrida=Osobní vozy se služ.oddílem
popis=Os. se služ odd. 2 třídy (1982)
hmotnost=38
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=48
[BRm MAV]
typ=vuz
podtrida=Osobní vozy restaurační
popis=Osobní vůz 2 třídy (1982)
hmotnost=25
delka=24,50
max_rych=140
max_rych_hnaci=140
[A OBB]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 1 třídy s klimatizací
hmotnost=38
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=54
[B OBB]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 2 třídy s klimatizací
hmotnost=39
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=57
[BD OBB]
typ=vuz
podtrida=Osobní vozy se služ.oddílem
popis=Os. se služ odd. 2 třídy s
hmotnost=38
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=48
[BRm OBB]
typ=vuz
podtrida=Osobní vozy restaurační
popis=Osobní vůz 2 třídy s klimatizací
hmotnost=25
delka=24,50
max_rych=140
max_rych_hnaci=140
[A PKP]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 1 třídy (1972)
hmotnost=38
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=54
[B PKP]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 2 třídy (1982)
hmotnost=39
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=57
[BD PKP]
typ=vuz
podtrida=Osobní vozy se služ.oddílem
popis=Os. se služ odd. 2 třídy (1982)
hmotnost=38
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=48
[BRm PKP]
typ=vuz
podtrida=Osobní vozy restaurační
popis=Osobní vůz 2 třídy (1982)
hmotnost=25
delka=24,50
max_rych=140
max_rych_hnaci=140
[Adu PKP]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 1 třídy s klimatizací
hmotnost=46
delka=26
max_rych=180
max_rych_hnaci=180
cel.osob=66
[Bdu PKP]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 2 třídy s klimatizací
hmotnost=48
delka=26
osob=88
max_rych=180
max_rych_hnaci=180
[BDdu PKP]
typ=vuz
podtrida=Osobní vozy se služ.oddílem
popis=Os. se služ odd. 2 třídy s
hmotnost=45
delka=26
osob=40
max_rych=180
max_rych_hnaci=180
[BRdu PKP]
typ=vuz
podtrida=Osobní vozy restaurační
popis=Osobní vůz 2 třídy s klimatizací
hmotnost=48
delka=26
osob=48
max_rych=180
max_rych_hnaci=180
[A SBB]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 1 třídy s klimatizací
hmotnost=38
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=54
[B SBB]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 2 třídy s klimatizací
hmotnost=39
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=57
[BD SBB]
typ=vuz
podtrida=Osobní vozy se služ.oddílem
popis=Os. se služ odd. 2 třídy s
hmotnost=38
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=48
[BRm SBB]
typ=vuz
podtrida=Osobní vozy restaurační
popis=Osobní vůz 2 třídy s klimatizací
hmotnost=25
delka=24,50
max_rych=140
max_rych_hnaci=140
[A RZD]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 1 třídy s klimatizací
hmotnost=38
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=54
[B RZD]
typ=vuz
podtrida=Osobní vozy
popis=Osobní vůz 2 třídy s klimatizací
hmotnost=39
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=57
[BD RZD]
typ=vuz
podtrida=Osobní vozy se služ.oddílem
popis=Os. se služ odd. 2 třídy s
hmotnost=38
delka=24,50
max_rych=140
max_rych_hnaci=140
cel.osob=46
[BRm RZD]
typ=vuz
podtrida=Osobní vozy restaurační
popis=Osobní vůz 2 třídy s klimatizací
hmotnost=25
delka=24,50
max_rych=140
max_rych_hnaci=140
[Eal]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vůz na sypké hmoty
hmotnost=22
delka=14
typ_nakladu=sypke hmoty
naklad=58
max_rych=80
[Eal U]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vůz na sypké hmoty
hmotnost=22
delka=14
typ_nakladu=uhlí
naklad=58
max_rych=80
[Eas-u]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vůz na sypké hmoty
hmotnost=24
delka=14
typ_nakladu=sypke hmoty
naklad=56
max_rych=120
[Eas-u U]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vůz na sypké hmoty
hmotnost=24
delka=14
typ_nakladu=uhlí
naklad=56
max_rych=90
[Eas-u S]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vůz na sypké hmoty
hmotnost=24
delka=14
typ_nakladu=šrot
naklad=56
max_rych=90
[Eamos]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vůz na sypké hmoty s
hmotnost=24
delka=14
typ_nakladu=sypke hmoty
naklad=56
max_rych=120
[Eanos]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vůz na sypké hmoty s plachtou
hmotnost=23
delka=14
typ_nakladu=sypke hmoty
naklad=56
max_rych=120
[Faccs]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty
hmotnost=23
delka=14
typ_nakladu=sypke hmoty
naklad=56
max_rych=100
[Faccs P]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty
hmotnost=23
delka=14
typ_nakladu=písek
naklad=56
max_rych=90
[Faccs S]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty
hmotnost=23
delka=14
typ_nakladu=štěrk
naklad=56
max_rych=90
[Faccs V]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty
hmotnost=23
delka=14
typ_nakladu=vápenec
naklad=56
max_rych=90
[Falns]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty
hmotnost=26
delka=15
typ_nakladu=sypke hmoty
naklad=47
max_rych=120
[Falns U]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty
hmotnost=26
delka=15
typ_nakladu=uhlí
naklad=47
max_rych=100
[Falns V]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty
hmotnost=26
delka=15
typ_nakladu=vápenec
naklad=47
max_rych=100
[Fals-z]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty
hmotnost=24
delka=14
typ_nakladu=sypke hmoty
naklad=57
max_rych=120
[Fals-z U]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty
hmotnost=24
delka=14
typ_nakladu=uhlí
naklad=57
max_rych=100
[Fals-z V]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty
hmotnost=24
delka=14
typ_nakladu=vápenec
naklad=57
max_rych=100
[Fcc]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty
hmotnost=13
delka=12
typ_nakladu=sypke hmoty
naklad=27
max_rych=80
[Fcc P]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty
hmotnost=13
delka=12
typ_nakladu=písek
naklad=27
max_rych=80
[Lhs]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty a kusové
hmotnost=21
delka=14
typ_nakladu=sypke hmoty a kusové
naklad=52
max_rych=100
[Lhs D]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty a kusové
hmotnost=21
delka=14
typ_nakladu=dřevo
naklad=52
max_rych=100
[Lhs R]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty a kusové
hmotnost=21
delka=14
typ_nakladu=cukrová řepa
naklad=52
max_rych=100
[Lhs U]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty a kusové
hmotnost=21
delka=14
typ_nakladu=uhlí
naklad=52
max_rych=100
[VSa]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz vysokostěný na sypke
hmotnost=24
delka=16
typ_nakladu=sypke hmoty
naklad=55
max_rych=100
[Vsa U]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz vysokostěný na sypke
hmotnost=24
delka=16
typ_nakladu=uhlí
naklad=55
max_rych=100
[Vsa S]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz vysokostěný na sypke
hmotnost=24
delka=16
typ_nakladu=šrot
naklad=55
max_rych=100
[Vtr]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty
hmotnost=12
delka=12
typ_nakladu=sypke hmoty
naklad=30
max_rych=100
[Vtr D]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty
hmotnost=12
delka=12
typ_nakladu=dřevo
naklad=30
max_rych=90
[Vtr R]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty
hmotnost=12
delka=12
typ_nakladu=cukrová řepa
naklad=30
max_rych=90
[Vtr U]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty
hmotnost=12
delka=12
typ_nakladu=uhlí
naklad=30
max_rych=90
[Vtr S]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty
hmotnost=12
delka=12
typ_nakladu=šrot
naklad=30
max_rych=90
[Talls]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty s
hmotnost=28
delka=13
typ_nakladu=sypke hmoty
naklad=51
max_rych=120
[Tads s]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty s
hmotnost=20
delka=17
typ_nakladu=sypke hmoty
naklad=60
max_rych=100
[Tads zl]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty s
hmotnost=20
delka=17
typ_nakladu=sypke hmoty
naklad=60
max_rych=100
[Tcms]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty s
hmotnost=13
delka=10
typ_nakladu=sypke hmoty
naklad=20
max_rych=120
[Tdgns]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty
hmotnost=15
delka=10
typ_nakladu=sypke hmoty
naklad=30
max_rych=120
[Tdns]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty (hojiva) s
hmotnost=15
delka=10
typ_nakladu=sypke hmoty
naklad=30
max_rych=120
[Gags]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=25
delka=16
typ_nakladu=kusové zboží
naklad=55
max_rych=100
[Gbkkqs]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=15
delka=14
typ_nakladu=kusové zboží
naklad=24
max_rych=100
[Gbkkqss]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=16
delka=14
typ_nakladu=kusové zboží
naklad=20
max_rych=120
[Gbqs]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží (len)
hmotnost=15
delka=14
typ_nakladu=pouze len
naklad=25
max_rych=100
[Gbs]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=15
delka=16
typ_nakladu=kusové zboží
naklad=26
max_rych=100
[Gogs]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží (obilí)
hmotnost=25
delka=16
typ_nakladu=převážně obilí
naklad=55
max_rych=100
[Habbins]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=29
delka=18
typ_nakladu=kusové zboží
naklad=61
max_rych=120
[Habbillns]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=30
delka=24
typ_nakladu=kusové zboží
naklad=60
max_rych=120
[Hbbillns]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=17
delka=16
typ_nakladu=kusové zboží
naklad=27
max_rych=120
[Haikks]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=28
delka=19
typ_nakladu=kusové zboží
naklad=56
max_rych=120
[Haks m]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží kryté
hmotnost=24
delka=16
typ_nakladu=kusové zboží
naklad=52
max_rych=120
[Haks s]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží kryté
hmotnost=24
delka=16
typ_nakladu=kusové zboží
naklad=52
max_rych=120
[Haks z]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží kryté
hmotnost=24
delka=16
typ_nakladu=kusové zboží
naklad=52
max_rych=120
[Iacqr]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Chladirenský vuz na kusové
hmotnost=30
delka=16
typ_nakladu=kusové zboží
naklad=30
max_rych=100
[L]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=12
delka=12
typ_nakladu=pouze potrainy
naklad=28
max_rych=100
[La]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Chladirenský vuz na kusové
hmotnost=34
delka=16
typ_nakladu=pouze potrainy
naklad=37
max_rych=100
[Lbbhps b]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=19
delka=14
typ_nakladu=pouze potrainy
naklad=17
max_rych=100
[Lbbhps zl]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=19
delka=14
typ_nakladu=pouze potrainy
naklad=17
max_rych=100
[Lbps b]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=18
delka=14
typ_nakladu=pouze potrainy
naklad=18
max_rych=100
[Lbps bp]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=18
delka=14
typ_nakladu=pouze potrainy
naklad=18
max_rych=100
[Rils s]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=25
delka=18
typ_nakladu=kusové zboží
naklad=55
max_rych=120
[Rils c]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=25
delka=18
typ_nakladu=kusové zboží
naklad=55
max_rych=120
[Rils m]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=25
delka=18
typ_nakladu=kusové zboží
naklad=55
max_rych=120
[Rils zl]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=25
delka=18
typ_nakladu=kusové zboží
naklad=55
max_rych=120
[Sis c]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=28
delka=15
typ_nakladu=kusové zboží
naklad=62
max_rych=120
[Sis m]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=28
delka=15
typ_nakladu=kusové zboží
naklad=62
max_rych=120
[Simms c]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=28
delka=14
typ_nakladu=kusové zboží
naklad=65
max_rych=120
[Simms zl]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=28
delka=14
typ_nakladu=kusové zboží
naklad=65
max_rych=120
[Simms m]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=28
delka=14
typ_nakladu=kusové zboží
naklad=65
max_rych=120
[Simms z]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=28
delka=14
typ_nakladu=kusové zboží
naklad=65
max_rych=120
[Tbis]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=15
delka=15
typ_nakladu=kusové zboží
naklad=26
max_rych=120
[Zt h]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=12
delka=12
typ_nakladu=kusové zboží
naklad=28
max_rych=100
[Zt zl]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=12
delka=12
typ_nakladu=pouze potrainy
naklad=28
max_rych=100
[Zt hb]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=12
delka=12
typ_nakladu=kusové zboží
naklad=28
max_rych=100
[Ks]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Vuz na kusové zboží
hmotnost=14
delka=13
typ_nakladu=kusové zboží
naklad=30
max_rych=120
[Ks D]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Vuz na kusové zboží
hmotnost=14
delka=13
typ_nakladu=dřevo
naklad=30
max_rych=100
[Lpsc]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Dvojitý vuz na přepravu
hmotnost=27
delka=26
typ_nakladu=automobily
naklad=25
max_rych=100
[Lpsc A]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Dvojitý vuz na přepravu
hmotnost=27
delka=26
typ_nakladu=automobily
naklad=25
max_rych=100
[Npp]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Vuz na přepravu automobilů
hmotnost=17
delka=15
typ_nakladu=automobily
naklad=18
max_rych=100
[Npp A]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Vuz na přepravu automobilů
hmotnost=17
delka=15
typ_nakladu=automobily
naklad=18
max_rych=90
[Pao]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Vuz na kusové zboží
hmotnost=26
delka=15
typ_nakladu=kusové zboží
naklad=54
max_rych=120
[Pao D]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Vuz na kusové zboží
hmotnost=26
delka=15
typ_nakladu=dřevo
naklad=54
max_rych=100
[Pao Ka]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Vuz na kusové zboží
hmotnost=26
delka=15
typ_nakladu=kabely
naklad=54
max_rych=100
[Pao Ko]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Vuz na kusové zboží
hmotnost=26
delka=15
typ_nakladu=kontejnery
naklad=54
max_rych=100
[Pao Kov]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Vuz na kusové zboží
hmotnost=26
delka=15
typ_nakladu=opracované ingoty
naklad=54
max_rych=100
[Pao Pl]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Vuz na kusové zboží
hmotnost=26
delka=15
typ_nakladu=plech
naklad=54
max_rych=100
[Paon]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Vuz na kusové a sypké zboží
hmotnost=23
delka=15
typ_nakladu=kusové zboží
naklad=57
max_rych=80
[Paon P]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Vuz na kusové a sypké zboží
hmotnost=23
delka=15
typ_nakladu=písek
naklad=57
max_rych=80
[Paop]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Vuz na přepravu automobilů
hmotnost=28
delka=16
typ_nakladu=automobily
naklad=52
max_rych=120
[Paop A]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Vuz na přepravu automobilů
hmotnost=28
delka=16
typ_nakladu=automobily
naklad=52
max_rych=100
[Ress]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Vuz na kusové zboží
hmotnost=25
delka=20
typ_nakladu=kusové zboží
naklad=55
max_rych=120
[Ress Ka]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Vuz na kusové zboží
hmotnost=25
delka=20
typ_nakladu=kabely
naklad=55
max_rych=100
[Ress T]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Vuz na kusové zboží
hmotnost=25
delka=20
typ_nakladu=trubky
naklad=55
max_rych=100
[Rmms]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Vuz na kusové zboží
hmotnost=28
delka=18
typ_nakladu=kusové zboží
naklad=52
max_rych=120
[Rmms Ka]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Vuz na kusové zboží
hmotnost=28
delka=18
typ_nakladu=kabely
naklad=52
max_rych=120
[DEC b]
typ=vuz
podtrida=Nákladní vozy cisternové
popis=Vuz na tekute hmoty
hmotnost=24
delka=13
typ_nakladu=tekute hmoty
naklad=56
max_rych=100
[DEC zl]
typ=vuz
podtrida=Nákladní vozy cisternové
popis=Vuz na tekute hmoty
hmotnost=24
delka=13
typ_nakladu=tekute hmoty
naklad=56
max_rych=100
[DEC c]
typ=vuz
podtrida=Nákladní vozy cisternové
popis=Vuz na tekute hmoty
hmotnost=24
delka=13
typ_nakladu=tekute hmoty
naklad=56
max_rych=100
[DEC m]
typ=vuz
podtrida=Nákladní vozy cisternové
popis=Vuz na tekute hmoty
hmotnost=24
delka=13
typ_nakladu=tekute hmoty
naklad=56
max_rych=100
[Uacns c]
typ=vuz
podtrida=Nákladní vozy cisternové
popis=Vuz na sypke hmoty
hmotnost=19
delka=18
typ_nakladu=sypke hmoty
naklad=72
max_rych=120
[Uacns zl]
typ=vuz
podtrida=Nákladní vozy cisternové
popis=Vuz na sypke hmoty
hmotnost=19
delka=18
typ_nakladu=sypke hmoty
naklad=72
max_rych=120
[Uapgs]
typ=vuz
podtrida=Nákladní vozy cisternové
popis=Vuz na tekuté hmoty
hmotnost=24
delka=15
typ_nakladu=tekuté hmoty
naklad=48
max_rych=100
[Uasc m]
typ=vuz
podtrida=Nákladní vozy cisternové
popis=Vuz na sypké hmoty
hmotnost=23
delka=14
typ_nakladu=sypké hmoty
naklad=57
max_rych=100
[Uasc c]
typ=vuz
podtrida=Nákladní vozy cisternové
popis=Vuz na sypké hmoty
hmotnost=23
delka=14
typ_nakladu=sypké hmoty
naklad=57
max_rych=100
[Ugps]
typ=vuz
podtrida=Nákladní vozy cisternové
popis=Vuz na sypké hmoty
hmotnost=17
delka=14
typ_nakladu=sypké hmoty
naklad=28
max_rych=100
[Ups]
typ=vuz
podtrida=Nákladní vozy cisternové
popis=Vuz na sypké hmoty
hmotnost=14
delka=12
typ_nakladu=sypké hmoty
naklad=27
max_rych=100
[Zacens]
typ=vuz
podtrida=Nákladní vozy cisternové
popis=Vuz na tekuté hmoty
hmotnost=24
delka=15
typ_nakladu=tekutá síra
naklad=66
max_rych=120
[Zaes zl]
typ=vuz
podtrida=Nákladní vozy cisternové
popis=Vuz na tekuté hmoty
hmotnost=26
delka=12
typ_nakladu=tekuté hmoty
naklad=54
max_rych=120
[Zaes s]
typ=vuz
podtrida=Nákladní vozy cisternové
popis=Vuz na tekuté hmoty
hmotnost=26
delka=12
typ_nakladu=tekuté hmoty
naklad=54
max_rych=120
[Zags]
typ=vuz
podtrida=Nákladní vozy cisternové
popis=Vuz na tekuté hmoty
hmotnost=22
delka=13
typ_nakladu=tekuté hmoty
naklad=48
max_rych=100
[Zkks m]
typ=vuz
podtrida=Nákladní vozy cisternové
popis=Vuz na tekuté hmoty
hmotnost=12
delka=10
typ_nakladu=tekuté hmoty
naklad=20
max_rych=100
[Zkks c]
typ=vuz
podtrida=Nákladní vozy cisternové
popis=Vuz na tekuté hmoty
hmotnost=12
delka=10
typ_nakladu=tekuté hmoty
naklad=20
max_rych=100
[Zkks zb]
typ=vuz
podtrida=Nákladní vozy cisternové
popis=Vuz na tekuté hmoty
hmotnost=12
delka=10
typ_nakladu=tekuté hmoty
naklad=20
max_rych=100
[Zkks zl]
typ=vuz
podtrida=Nákladní vozy cisternové
popis=Vuz na tekuté hmoty
hmotnost=12
delka=10
typ_nakladu=tekuté hmoty
naklad=20
max_rych=100
[Flls]
typ=vuz
podtrida=Nákladní vozy speciální
popis=Vuz na sypke hmoty
hmotnost=22
delka=16
typ_nakladu=sypke hmoty
naklad=58
max_rych=65
[Flls S]
typ=vuz
podtrida=Nákladní vozy speciální
popis=Vuz na sypke hmoty
hmotnost=22
delka=16
typ_nakladu=štěrk
naklad=58
max_rych=65
[Ua]
typ=vuz
podtrida=Nákladní vozy speciální
popis=Vuz na sypke hmoty speciální
hmotnost=27
delka=10
typ_nakladu=sypke hmoty
naklad=52
max_rych=50
[Ua S]
typ=vuz
podtrida=Nákladní vozy speciální
popis=Vuz na sypke hmoty speciální
hmotnost=27
delka=10
typ_nakladu=štěrk
naklad=52
max_rych=50
[Fas]
typ=vuz
podtrida=Nákladní vozy speciální
popis=Vuz na kontejnerové vany
hmotnost=31
delka=16
typ_nakladu=kontejnerové vany
naklad=61
max_rych=100
[Fbks]
typ=vuz
podtrida=Nákladní vozy speciální
popis=Vuz na roztavenou ocel
hmotnost=16
delka=13
typ_nakladu=roztavená ocel
naklad=24
max_rych=80
[Uaain]
typ=vuz
podtrida=Nákladní vozy speciální
popis=Hlubinný vuz speciální stavby
hmotnost=70
delka=28
typ_nakladu=PLM zásilky
naklad=140
max_rych=100
[Uaain b]
typ=vuz
podtrida=Nákladní vozy speciální
popis=Hlubinný vuz speciální stavby
hmotnost=70
delka=28
typ_nakladu=PLM zásilky
naklad=140
max_rych=100
[162]
typ=hnaci
podtrida=El. stejnosměrné
popis=Stejnosměrná lokomotiva
vykon=3060
hmotnost=84
delka=17
max_rych=140
max_rych_hnaci=140
elektro=1
[163]
typ=hnaci
podtrida=El. stejnosměrné
popis=Stejnosměrná lokomotiva
vykon=3060
hmotnost=84
delka=17
max_rych=120
max_rych_hnaci=120
elektro=1
[263]
typ=hnaci
podtrida=El. střídavé
popis=Střídavá lokomotiva
vykon=3060
hmotnost=85
delka=17
max_rych=120
max_rych_hnaci=120
elektro=2
[350]
typ=hnaci
podtrida=El. dvojsystemove
popis=El. dvojsystemova loko
vykon=4000
hmotnost=87
delka=16
max_rych=160
max_rych_hnaci=160
sila=227
elektro=3
[363]
typ=hnaci
podtrida=El. dvojsystemove
popis=El. dvojsystemova loko (1981)
vykon=3060
hmotnost=78
delka=17
max_rych=120
max_rych_hnaci=120
elektro=3
[OBB 1116]
typ=hnaci
podtrida=El. dvojsystemove
popis=El. dvojsystemova loko
vykon=3000
hmotnost=80
delka=17
max_rych=160
max_rych_hnaci=160
elektro=3
[Raj]
typ=vuz
podtrida=Nákladní vozy
popis=Vuz na sypke hmoty
hmotnost=12
delka=23
typ_nakladu=sypke hmoty
naklad=30
max_rych=100
[011]
typ=vuz
podtrida=Pripojne vozy
popis=Pripojny vuz
hmotnost=12
delka=14
osob=85
max_rych=80
max_rych_hnaci=80
[705]
typ=hnaci
podtrida=Diesel lokomotivy
popis=Úzkorozchodná diesel (1954)
vykon=260
hmotnost=32
delka=13
max_rych=50
max_rych_hnaci=50
rozchod=760
[720]
typ=hnaci
podtrida=Diesel lokomotivy
popis=Lokomotiva (1958)
vykon=550
hmotnost=60
delka=13
max_rych=60
max_rych_hnaci=60
[749]
typ=hnaci
podtrida=Diesel lokomotivy
popis=Lokomotiva ČKD (1966)
vykon=1100
hmotnost=74
delka=16
max_rych=100
max_rych_hnaci=100
[750]
typ=hnaci
podtrida=Diesel lokomotivy
popis=Lokomotiva ČKD (1975)
vykon=1325
hmotnost=73
delka=16
max_rych=100
max_rych_hnaci=100
[A]
typ=vuz
podtrida=Osobní vozy
popis=1.tr. kupe
hmotnost=40
delka=25
osob=80
max_rych=140
[Aeel]
typ=vuz
podtrida=Osobní vozy
popis=1.tr.
hmotnost=40
delka=25
osob=80
max_rych=160
[AB]
typ=vuz
podtrida=Osobní vozy
popis=1.+2.tr. kupe
hmotnost=40
delka=25
osob=80
max_rych=140
[ZSR Bpeer]
typ=vuz
podtrida=Osobní vozy
popis=2.tr.
hmotnost=40
delka=25
osob=80
max_rych=160
[ZSR ARpeer]
typ=vuz
podtrida=Osobní vozy
popis=1.tr.+jidelni
hmotnost=40
delka=25
osob=80
max_rych=160
[ZSR WLAB]
typ=vuz
podtrida=Osobní vozy
popis=Spaci
hmotnost=40
delka=25
osob=80
max_rych=160
[ZSR BRcm]
typ=vuz
podtrida=Osobní vozy
popis=1.tr.+jidelni
hmotnost=40
delka=25
osob=80
max_rych=160
[B]
typ=vuz
podtrida=Osobní vozy
popis=2.tr. kupe
hmotnost=40
delka=25
osob=80
max_rych=140
[Bcc]
typ=vuz
podtrida=Osobní vozy
popis=2.tr.
hmotnost=40
delka=25
osob=80
max_rych=140
[BDmeer]
typ=vuz
podtrida=Osobní vozy se služ.oddílem
popis=2.tr.+služební oddíl
hmotnost=40
delka=25
osob=80
max_rych=140
[BDbmsee]
typ=vuz
podtrida=Osobní vozy se služ.oddílem
popis=2.tr.+služební oddíl
hmotnost=40
delka=25
osob=80
max_rych=140
[BDs]
typ=vuz
podtrida=Osobní vozy se služ.oddílem
popis=2.tr.+služební oddíl
hmotnost=40
delka=25
osob=80
max_rych=140
[Beel]
typ=vuz
podtrida=Osobní vozy
popis=2.tr.
hmotnost=40
delka=25
osob=80
max_rych=140
[Bdmtee]
typ=vuz
podtrida=Osobní vozy
popis=2.tr. velkoprostor.
hmotnost=40
delka=25
osob=80
max_rych=140
[Bpee]
typ=vuz
podtrida=Osobní vozy
popis=2.tr. velkoprostor.
hmotnost=40
delka=25
osob=80
max_rych=140
[Bpb]
typ=vuz
podtrida=Osobní vozy
popis=2.tr. velkoprostor.
hmotnost=40
delka=25
osob=80
max_rych=140
[Bt]
typ=vuz
podtrida=Osobní vozy
popis=2.tr. velkoprostor.
hmotnost=40
delka=25
osob=80
max_rych=140
[Bp]
typ=vuz
podtrida=Osobní vozy
popis=2.tr. velkoprostor.
hmotnost=40
delka=25
osob=80
max_rych=140
[Bmz]
typ=vuz
podtrida=Osobní vozy
popis=2.tr. velkoprostor.
hmotnost=40
delka=25
osob=80
max_rych=160
[Bmx]
typ=vuz
podtrida=Přípojné vozy
popis=2.tr. pripojny k 850
hmotnost=20
delka=20
osob=60
max_rych=100
[Bifx]
typ=vuz
podtrida=Přípojné vozy
popis=2.tr. pripojny
hmotnost=20
delka=20
osob=60
max_rych=100
[Bix]
typ=vuz
podtrida=Přípojné vozy
popis=2.tr. pripojny
hmotnost=20
delka=20
osob=60
max_rych=100
[Btx]
typ=vuz
podtrida=Přípojné vozy
popis=2.tr. pripojny
hmotnost=20
delka=20
osob=60
max_rych=100
[D]
typ=vuz
podtrida=Služební vozy
popis=pro Mn vlaky
hmotnost=20
delka=15
max_rych=80
[Ds]
typ=vuz
podtrida=Služební vozy
popis=Služební vůz pro osobní vlaky
hmotnost=20
delka=20
max_rych=140
[WLABmee]
typ=vuz
podtrida=Osobní vozy
popis=
hmotnost=20
osob=80
delka=20
max_rych=140
[WRRm]
typ=vuz
podtrida=Osobní vozy
popis=
hmotnost=20
osob=80
delka=20
max_rych=160
[WRRmz]
typ=vuz
podtrida=Osobní vozy
popis=
hmotnost=20
osob=80
delka=20
max_rych=160
[Habbilns]
typ=vuz
podtrida=Nákladní vozy
popis=Vuz na sypke hmoty
hmotnost=12
delka=23
typ_nakladu=sypke hmoty
naklad=30
max_rych=100
[Hadgs]
typ=vuz
podtrida=Nákladní vozy
popis=Vuz na sypke hmoty
hmotnost=12
delka=23
typ_nakladu=sypke hmoty
naklad=30
max_rych=100
[Lp]
typ=vuz
podtrida=Nákladní vozy
popis=Vuz na sypke hmoty
hmotnost=12
delka=23
typ_nakladu=sypke hmoty
naklad=30
max_rych=100
[Nas]
typ=vuz
podtrida=Nákladní vozy
popis=Vuz na sypke hmoty
hmotnost=12
delka=23
typ_nakladu=sypke hmoty
naklad=30
max_rych=100
[Post]
typ=vuz
podtrida=Nákladní vozy
popis=Vuz na poštu
hmotnost=12
delka=23
typ_nakladu=pošta
naklad=30
max_rych=100
[Ra]
typ=vuz
podtrida=Nákladní vozy
popis=Cisterna
hmotnost=12
delka=23
typ_nakladu=kapaliny
naklad=30
max_rych=100
[Sqs]
typ=vuz
podtrida=Nákladní vozy
popis=Plošinový vůz
hmotnost=12
delka=23
typ_nakladu=kusové zboží
naklad=30
max_rych=100
[Vte]
typ=vuz
podtrida=Nákladní vozy
popis=Vuz na drevo
hmotnost=12
delka=23
typ_nakladu=drevo
naklad=30
max_rych=100
[Wap]
typ=vuz
podtrida=Nákladní vozy
popis=Vuz na sypke hmoty
hmotnost=12
delka=23
typ_nakladu=sypke hmoty
naklad=30
max_rych=100
[Z]
typ=vuz
podtrida=Nákladní vozy
popis=Krytý vůz
hmotnost=12
delka=23
typ_nakladu=kryté zboží
naklad=30
max_rych=100
[Zaz]
typ=vuz
podtrida=Nákladní vozy
popis=Krytý vůz
hmotnost=12
delka=23
typ_nakladu=kryté zboží
naklad=30
max_rych=100
[Zts]
typ=vuz
podtrida=Nákladní vozy
popis=Krytý vůz
hmotnost=12
delka=23
typ_nakladu=kryté zboží
naklad=30
max_rych=100
[Ztsc]
typ=vuz
podtrida=Nákladní vozy
popis=Krytý vůz
hmotnost=12
delka=23
typ_nakladu=kryté zboží
naklad=30
max_rych=100
[Ztsc_Post]
typ=vuz
podtrida=Nákladní vozy
popis=Krytý vůz
hmotnost=12
delka=23
typ_nakladu=kryté zboží
naklad=30
max_rych=100
[681 Ampz]
typ=hnaci
podtrida=El. jednotky
popis=El. dvojsystemova jednotka
vykon=2000
hmotnost=82
delka=17
max_rych=240
max_rych_hnaci=240
sila=227
elektro=2
osob=200
[682 Bmpz]
typ=hnaci
podtrida=El. jednotky
popis=El. dvojsystemova jednotka
vykon=2000
hmotnost=82
delka=17
max_rych=240
max_rych_hnaci=240
sila=227
elektro=2
osob=200
[081 Ampz]
typ=vuz
podtrida=El. jednotky
popis=El. jednotka - vlozeny vuz
hmotnost=82
delka=17
max_rych=240
max_rych_hnaci=240
osob=200
[683 BRmpz]
typ=hnaci
podtrida=El. jednotky
popis=El. dvojsystemova jednotka
vykon=2000
hmotnost=82
delka=17
max_rych=240
max_rych_hnaci=240
sila=227
elektro=2
osob=200
[684 Bmpz]
typ=hnaci
podtrida=El. jednotky
popis=El. dvojsystemova jednotka
vykon=2000
hmotnost=82
delka=17
max_rych=240
max_rych_hnaci=240
sila=227
elektro=2
osob=200
[084 Bmpz]
typ=vuz
podtrida=El. jednotky
popis=El. jednotka - vlozeny vuz
hmotnost=82
delka=17
max_rych=240
max_rych_hnaci=240
osob=200
[082 Bmpz]
typ=vuz
podtrida=El. jednotky
popis=El. jednotka - vlozeny vuz
hmotnost=82
delka=17
max_rych=240
max_rych_hnaci=240
osob=200
[560]
typ=hnaci
podtrida=El. jednotky
popis=El. střídavá jednotka
vykon=1000
hmotnost=40
delka=17
max_rych=120
max_rych_hnaci=120
elektro=2
osob=200
[560r]
typ=hnaci
podtrida=El. jednotky
popis=El. střídavá jednotka
vykon=1000
hmotnost=40
delka=17
max_rych=120
max_rych_hnaci=120
elektro=2
osob=200
[060]
typ=vuz
podtrida=El. jednotky
popis=El.stříd.jedn.-vůz
hmotnost=40
delka=17
max_rych=120
max_rych_hnaci=120
osob=200
[MUV69]
typ=hnaci
podtrida=Spec.vozidla
popis=Motorový univerzální vozík
vykon=80
hmotnost=8
delka=20
max_rych=80
max_rych_hnaci=80
[Cem]
typ=vuz
podtrida=Nákladní vozy
popis=Nedefinovano
hmotnost=12
delka=23
typ_nakladu=ostatni
naklad=30
max_rych=120
[Eas]
typ=vuz
podtrida=Nákladní vozy
popis=Nedefinovano
hmotnost=12
delka=23
typ_nakladu=ostatni
naklad=30
max_rych=120
[Falls]
typ=vuz
podtrida=Nákladní vozy
popis=Nedefinovano
hmotnost=12
delka=23
typ_nakladu=ostatni
naklad=30
max_rych=120
[Hacgs]
typ=vuz
podtrida=Nákladní vozy
popis=Nedefinovano
hmotnost=12
delka=23
typ_nakladu=ostatni
naklad=30
max_rych=120
[Hbcks]
typ=vuz
podtrida=Nákladní vozy
popis=Nedefinovano
hmotnost=12
delka=23
typ_nakladu=ostatni
naklad=30
max_rych=120
[Ibops]
typ=vuz
podtrida=Nákladní vozy
popis=Nedefinovano
hmotnost=12
delka=23
typ_nakladu=ostatni
naklad=30
max_rych=120
[Iqrs]
typ=vuz
podtrida=Nákladní vozy
popis=Nedefinovano
hmotnost=12
delka=23
typ_nakladu=ostatni
naklad=30
max_rych=120
[Kbkks]
typ=vuz
podtrida=Nákladní vozy
popis=Nedefinovano
hmotnost=12
delka=23
typ_nakladu=ostatni
naklad=30
max_rych=120
[Smm]
typ=vuz
podtrida=Nákladní vozy
popis=Nedefinovano
hmotnost=12
delka=23
typ_nakladu=ostatni
naklad=30
max_rych=120
[Smml]
typ=vuz
podtrida=Nákladní vozy
popis=Nedefinovano
hmotnost=12
delka=23
typ_nakladu=ostatni
naklad=30
max_rych=120
[Zae]
typ=vuz
podtrida=Nákladní vozy
popis=Nedefinovano
hmotnost=12
delka=23
typ_nakladu=ostatni
naklad=30
max_rych=120
[Zaes]
typ=vuz
podtrida=Nákladní vozy
popis=Nedefinovano
hmotnost=12
delka=23
typ_nakladu=ostatni
naklad=30
max_rych=120
[Zkks]
typ=vuz
podtrida=Nákladní vozy
popis=Nedefinovano
hmotnost=12
delka=23
typ_nakladu=ostatni
naklad=30
max_rych=120
<file_sep>/php/vini/getimg.php
<?
// Serve the PNG image of a vehicle: getimg.php?id=<vehicle id>[&obrid=<alt #>]
// Falls back to img/vuz_default.png when the requested image does not exist.
// NOTE(review): some callers request getimg.php?typ=<name> (see confirm() in
// index.php); this script only honours id/obrid, so those requests fall
// through to the default image — confirm that is intended.

// Emulate the (disabled) register_globals setting: import every GET
// parameter into a same-named variable.
// NOTE(review): this lets the client set arbitrary variables; only $id and
// $obrid matter below and both are coerced to numbers, which blocks path
// traversal here — but keep it in mind when editing.
foreach ($_GET as $key => $value) {
// echo "Key: <b>$key</b>; Value: <b>$value</b><br>\n";
$$key=$value;
}
$id*=1;
$obrid*=1;
if ($obrid) {
// alternative images are stored as img/v_<id>_<obrid>.png
$id=$id.'_'.$obrid;
}
if (File_Exists('img/v_'.$id.'.png')) {
header('Content-type: image/png');
$fp=fopen('img/v_'.$id.'.png','rb');
echo fread($fp,filesize('img/v_'.$id.'.png'));
fclose($fp);
} else {
header('Content-type: image/png');
// include() streams the default PNG to the client verbatim.
// NOTE(review): include would also execute any PHP embedded in the file;
// readfile() would be the safer choice — confirm before changing.
include('img/vuz_default.png');
}
?>
<file_sep>/php/vini/obre.php
<?
// One-off maintenance script: renames legacy image files
// (img/old_<name>.png, img/old__<n>_<name>.png) to the id-based scheme
// (img/v_<id>.png, img/v_<id>_<n>.png) and prints a plain-text report.
// NOTE(review): uses the legacy mysql_* API while index.php uses mysqli —
// presumably predates the migration; confirm before running on modern PHP.
include "./root.php";
include "{$root}obecne.php";
include "{$root}sql.php";
headers();
header('Content-type: text/plain; charset=windows-1250');
echo "Prepis jmen souboru\n";
echo ";(c) bbf 2010\n";
echo ";Datum vytvoreni: ".Date("d. M Y H:i:s")."\n\n\n\n\n";
// default header
// walk every vehicle and rename its legacy image files
$res=mysql_query("SELECT * FROM vozy;");
while ($dat=mysql_fetch_array($res)) {
echo "VUZ {$dat['nazev']}\n";
if (File_exists("img/v_{$dat['id']}.png")) {
// echo "Ma zakl. obr.\n";
} else {
/* echo "---\n";
if (File_exists("img/old_{$dat['nazev']}.png")) {
echo "Exist. puvodni\n";
rename("img/old_{$dat['nazev']}.png","img/v_{$dat['id']}.png");
}*/
};
// rename every numbered alternative image; $oi ends one past the last hit
$oi=1;
while (File_exists("img/old__{$oi}_{$dat['nazev']}.png")) {
echo "Exist. puvodni EO $oi\n";
rename("img/old__{$oi}_{$dat['nazev']}.png","img/v_{$dat['id']}_{$oi}.png");
$oi++;
// "img/v_{$dat['id']}_$oi.png"
}
// note: "$oi-->1" parses as "$oi-- > 1" (post-decrement, then compare)
if ($oi-->1) {
echo "Ma $oi rozs. obr.\n";
} else {
echo "neni img/old__{$oi}_{$dat['nazev']}.png";
}
echo "\n";
};
?>
<file_sep>/php/vini/index.php
<?
// Vehicle-definition administration page (list / edit / delete / images).
include './root.php';
include "{$root}obecne.php";
include "{$root}sql.php";
// $akce/$pack arrive from the request (imported elsewhere, presumably by
// obecne.php via register_globals emulation — confirm); default to empty.
if (!isset($akce)) $akce = "";
if (!isset($pack)) $pack = "";
/**
 * Image administration page for vehicle $id.
 *
 * $oa selects a sub-action performed before the page is rendered:
 *   'load'    - show the PNG upload form ($altid selects an alternative image),
 *   'save'    - store an uploaded PNG (base image when $altid is 0/empty),
 *   'default' - swap alternative image $altid with the base image,
 *   'delalt'  - delete alternative image $altid and compact the numbering.
 * Base image: img/v_<id>.png, alternatives: img/v_<id>_<n>.png.
 */
function obrfrm($id,$oa,$altid) {
echo "<h3>Změna obrázků vozu č. $id</h3>";
echo "<p>Pozor - změny obrázků se nepřenáší do STAGu automaticky při synchronizaci, je nutné zvolit příkaz \"Obnovit databázi obrázků\"!
Automaticky se přenáší pouze nové základní obrázky (které nebyly v databázi v době vydání STAGu).
</p>";
// sub-action forms / handlers
if ($oa=='load') {
echo <<<EOT
<h4>Změna obrázku:</h4>
<form method="post" enctype="multipart/form-data" action="index.php">
<input type="hidden" name="akce" value="obredit" />
<input type="hidden" name="id" value="$id" />
<input type="hidden" name="altid" value="$altid" />
<input type="hidden" name="oa" value="save" />
Tlačítkem vedle políčka vyhledejte soubor s obrázkem <b>typu PNG</b> k natažení do databáze na svém disku:
<input type="file" name="userfile" height="15" /><br>
Potvrďte natažení souboru tlačítkem (akce může trvat až 4 minuty při velikosti souboru 1 MB, 30 s při 100 kB - na modemu):
<input type="submit" value="Odešli soubor" />
</form>
EOT;
}
if ($oa=='default') {
// three-way swap: old base -> temp, alternative -> base, temp -> alternative
rename("img/v_{$id}.png","img/v_{$id}_TMP.png");
rename("img/v_{$id}_{$altid}.png","img/v_{$id}.png");
rename("img/v_{$id}_TMP.png","img/v_{$id}_{$altid}.png");
}
if ($oa=='delalt') {
unlink("img/v_{$id}_{$altid}.png");
// is there an image with a higher altid? if so, move the highest one
// into the freed slot so the numbering stays contiguous
$oi=$altid+1;
while (File_exists("img/v_{$id}_{$oi}.png")) {
$oi++;
};
$oi--;
if ($oi>$altid) {rename("img/v_{$id}_{$oi}.png","img/v_{$id}_{$altid}.png");}
}
if ($oa=='save') {
$fname = $_FILES['userfile']['name'];
echo "<p>Soubor $fname</p>";
if(!is_uploaded_file($_FILES['userfile']['tmp_name'])) {
echo "<h3>Chyba serveru - soubor se nepřenesl na server!</h3>";
};
if (is_uploaded_file($_FILES['userfile']['tmp_name'])){
$fileSize = $_FILES['userfile']['size'];
if ($fileSize<100000){
@unlink("./img/tempimg");
move_uploaded_file ($_FILES['userfile']['tmp_name'], "./img/tempimg");
// validate the uploaded image
$attr = getimagesize("./img/tempimg");
echo "<p>Zpracování: obrázek typu {$attr['mime']}, {$attr[0]} x {$attr[1]} px.</p>";
if (!$attr[0]) {
echo "<h3>Chybný obrázek! Zpracování zrušeno.</h3>";
} else {
// re-encode anything that is not a 40px-high PNG
if ($attr[1]<>40||$attr['mime']!='image/png') {
echo "<p><b>Snažte se dodržovat svislý rozměr obrázků 40 px a formát PNG!</b> Obrázek bude upraven a může dojít ke zhoršení jeho kvality. ";
$fp=fopen("./img/tempimg",'rb');
$obrfile=fread($fp,filesize("./img/tempimg"));
fclose($fp);
$obr=imagecreatefromstring($obrfile);
if (!$obr) {
echo "<h3>CHYBA - Obrázek nelze převést.</h3>";
layout_end();
die();
};
@unlink("./img/tempimg");
if ($attr[1]<>40) {
// automatic height rescaling is intentionally disabled below:
// $obrout=imagecreate(round($attr[0]*40.0/$attr[1]),40);
// imagecopyresampled($obrout,$obr,0,0,0,0,round($attr[0]*40.0/$attr[1]),40,$attr[0],$attr[1]);
// imagepng($obrout,"./img/tempimg");
// echo "[upravena výška - možné rozostření!]";
echo "[automaticka uprava vysky byla vypnuta.]";
imagepng($obr,"./img/tempimg");
} else {
imagepng($obr,"./img/tempimg");
}
echo "</p>\n\n";
}
// move the validated file into its final place
if (!$altid) {
@unlink("./img/v_{$id}.png");
rename("./img/tempimg","./img/v_{$id}.png");
} else {
@unlink("./img/v_{$id}_{$altid}.png");
rename("./img/tempimg","./img/v_{$id}_{$altid}.png");
}
echo "<h3>Soubor byl uložen.</h3>";
}
} else {
echo "<h3>Zpracování zrušeno - maximální velikost souboru je 100 kB!</h3>";
}
}
}
// render the current base image, alternatives and action links
echo "<h4>Základní obrázek vozu:</h4>";
if (File_exists("./img/v_$id.png")) {
echo "<img src=\"getpng.php?file=v_$id.png\" /><br />";
echo "<a href=\"index.php?akce=obredit&oa=load&id=$id\">Změnit obrázek</a>";
$oi=1;
if (File_exists("img/v_{$id}_{$oi}.png")) {
echo "<h4>Alternativní obrázky:</h4>";
do {
echo "<br>\n";
echo "<b>$oi</b>: <img src=\"getpng.php?file=v_{$id}_{$oi}.png\" /><br />";
echo "<a href=\"index.php?akce=obredit&oa=delalt&id=$id&altid=$oi\">Smazat</a>
<a href=\"index.php?akce=obredit&oa=load&id=$id&altid=$oi\">Změnit obrázek</a>
<a href=\"index.php?akce=obredit&oa=default&id=$id&altid=$oi\">Nastavit jako základní</a>
";
$oi++;
} while (File_exists("img/v_{$id}_{$oi}.png"));
}
echo "<br><br><a href=\"index.php?akce=obredit&oa=load&id=$id&altid=$oi\">Přidat alternativní obrázek</a>";
} else {
echo "<p><b>Vůz nemá definován žádný obrázek</b></p>";
echo "<br><br><a href=\"index.php?akce=obredit&oa=load&id=$id\">Přidat obrázek</a>";
}
}
/**
 * Render the add/edit form for vehicle $id (0 = new vehicle).
 * Reads the row from table `vozy` (or zeroed defaults for a new vehicle)
 * and emits an HTML form that posts back with akce=save.  Inline
 * onmouseover handlers fill the #naptex help row; the overfrm() JS
 * function (emitted in the heredoc below) validates required fields.
 */
function vozyfrm($id) {
global $sql_id;
if ($id==0) {
$dat=array();
echo "<h3>Nový vůz</h3>";
echo "<p>Obrázek vozu přidejte až po vložení vozu do databáze!</p>";
// zeroed defaults for a new vehicle
// NOTE(review): 'naklad'=>0 appears twice; the later entry wins — harmless
$dat=array('sila'=>0,'vykon'=>0,'osob'=>0,'naklad'=>0,'max_rych'=>0,'max_rych_hnaci'=>0,'naklad'=>0,'delka'=>0,'hmotnost'=>0);
} else {
$dat=mysqli_fetch_array(mysqli_query($sql_id,"SELECT * FROM vozy WHERE id=$id"));
echo "<h3>Editace vozu (identifikátor vozu: $id)</h3>";
// show the base image plus any numbered alternatives
if (File_exists("img/v_{$dat['id']}.png")) {
echo "<img src=\"img/v_{$dat['id']}.png\" />";
$oi=1;
while (File_exists("img/v_{$dat['id']}_$oi.png")) {
echo "<br>\n";
echo "<img src=\"img/v_{$dat['id']}_$oi.png\" /> (alternativní obrázek $oi)";
$oi++;
}
} else {
echo "Vůz nemá definován žádný obrázek";
}
echo "<br><form action=\"#\" method=\"get\"><input type=\"hidden\" name=\"akce\" value=\"obredit\" /><input type=\"hidden\" name=\"id\" value=\"$id\" /><input type=\"submit\" value=\"Změnit obrázek\" /></form>";
};
// the form table
echo "<form onsubmit=\"return overfrm(this);\" action=\"#\" method=\"post\"><input type=\"hidden\" name=\"akce\" value=\"save\" /><table width=\"100%\">\n";
// vehicle type (nazev)
echo "<tr><td width=\"40%\">Typ vozu:</td><td width=\"60%\">";
echo "<input type=\"text\" name=\"nazev\" value=\"{$dat['nazev']}\"
onmouseover=\"document.getElementById('naptex').innerHTML='Typ vozu v <i>novém</i> značení ČD (např. 810), pokud neexistuje, tak staré značení včetně tečky (např. 354.1); <b>Fiktivní vozy nebo vozy cizích železničních správ </b> mimo ŽSR <b>uvádějte s jménem dané správy ZA typem</b>, např. 1114_OBB. ;\"
/> (např. 810 (vozy ČD, ČSD, ŽSR), 1114_OBB (cizí vozy, fiktivní vozy))";
echo "</td></tr>";
// description (popis)
echo "<tr><td>Popis:</td><td>\n";
echo "<input type=\"text\" name=\"popis\" value=\"{$dat['popis']}\" maxlength=\"250\"
onmouseover=\"document.getElementById('naptex').innerHTML='Stručný popis nad hranice toho, co určuje třída vozu - např. Dieselelektrická nákladní lokomotiva';\"
size=\"70\" />\n";
echo "</td></tr>\n";
// manufacturer (vyrobce)
echo "<tr><td>Výrobce, začátek dodávek:</td><td>\n";
echo "<input type=\"text\" name=\"vyrobce\" value=\"{$dat['vyrobce']}\" maxlength=\"100\"
onmouseover=\"document.getElementById('naptex').innerHTML='Stručný popis výrobce a datování - např. Škoda (1974) nebo Škoda (1962), reko ČKD 1989';\"
size=\"70\" />\n";
echo "</td></tr>\n";
// powered/towed selector; the onchange handler toggles the helper
// selects (#naphn/#napta) and the powered-only tbody (#thn)
echo "<tr><td>Hnací/tažený vůz:</td><td>\n";
echo "<select name=\"hnaci\" id=\"hnhn\"
onmouseover=\"document.getElementById('naptex').innerHTML='Zvol si, zda je vůz <b>hnací</b> - lokomotiva nebo <i>řídící vůz</i> či <b>tažený</b> - normální vagon. Přípojný vůz k motoráku nebo vnitřní vůz (bezmotorový) elektrické jednotky je <i>tažený</i>, nikoliv hnací! - někteří to nechápou:)';\"
onchange=\"if (document.getElementById('hnhn').value=='1') {document.getElementById('naphn').style.display='inline';document.getElementById('napta').style.display='none';document.getElementById('thn').style.display='';} else {document.getElementById('napta').style.display='inline';document.getElementById('naphn').style.display='none';document.getElementById('thn').style.display='none';}; \"><option value=\"1\" ".($dat['hnaci']?"selected=\"selected\"":"").">Hnací</option><option value=\"0\"" .(!$dat['hnaci']?"selected=\"selected\"":"").">Tažený</option> \n";
echo "</select></td></tr>\n";
// class (trida) — free text plus helper selects of existing classes
echo "<tr><td>Třída:</td><td>\n";
echo "<input type=\"text\" name=\"trida\" value=\"{$dat['trida']}\" maxlength=\"30\" size=\"30\" id=\"trida\"
onmouseover=\"document.getElementById('naptex').innerHTML='Zvol si z výběrového pole vpravo třídu vozu nebo zapiš do pole vlastní název - tím se založí nová třída.';\"
/>\n";
echo "<< ";
// helper selects: existing powered classes (#naphn) / towed classes (#napta),
// only the one matching the current hnaci flag is visible
echo "<select id=\"naphn\" onchange=\"document.getElementById('trida').value=document.getElementById('naphn').value\"";
if (!$dat['hnaci']) {
echo "style=\"display:none\" ";
}
echo ">\n";
$res2=mysqli_query($sql_id,"SELECT trida FROM vozy WHERE hnaci=1 GROUP BY trida ORDER BY trida");
while ($dat2=mysqli_fetch_array($res2)) {echo "<option value=\"{$dat2['trida']}\">{$dat2['trida']}</option>\n";};
echo "</select>\n";
echo "<select id=\"napta\" onchange=\"document.getElementById('trida').value=document.getElementById('napta').value\"";
if ($dat['hnaci']) {
echo "style=\"display:none\" ";
}
echo ">\n";
$res2=mysqli_query($sql_id,"SELECT trida FROM vozy WHERE hnaci=0 GROUP BY trida ORDER BY trida");
while ($dat2=mysqli_fetch_array($res2)) {echo "<option value=\"{$dat2['trida']}\">{$dat2['trida']}</option>\n";};
echo "</select>\n";
echo "</td></tr>\n";
// weight and length
echo "<tr><td>Hmotnost:</td><td>\n";
echo "<input type=\"text\" name=\"hmotnost\" value=\"{$dat['hmotnost']}\" maxlength=\"3\" size=\"6\"
onmouseover=\"document.getElementById('naptex').innerHTML='Chjo, tohle nepotřebuje nápovědu...';\"
/> t (celé číslo)\n";
echo "</td></tr>\n";
echo "<tr><td>Délka:</td><td>\n";
echo "<input type=\"text\" name=\"delka\" value=\"{$dat['delka']}\" maxlength=\"6\" size=\"6\"
onmouseover=\"document.getElementById('naptex').innerHTML='Sem zapiš délku vozu nárazník - nárazník.';\"
/> m\n";
echo "</td></tr>\n";
// powered-vehicle-only parameters (tbody #thn, shown/hidden by JS above)
echo "<tbody id=\"thn\"";
if (!$dat['hnaci']) {
echo "style=\"display:none\" ";
}
echo " >" ;
echo "<tr><td>Výkon:</td><td>\n";
echo "<input type=\"text\" name=\"vykon\" value=\"{$dat['vykon']}\" maxlength=\"5\" size=\"5\"
onmouseover=\"document.getElementById('naptex').innerHTML='Sem zapiš výkon lokomotivy. Pokud jde o řídící vůz, zapiš sem nulu! - výkon odevzdává motorový vůz, který je definován zvlášť.';\"
/> kW\n";
echo "</td></tr>\n";
echo "<tr><td>Tažná síla (0 nebo -1 pro neznámou):</td><td>\n";
echo "<input type=\"text\" name=\"sila\" value=\"{$dat['sila']}\" maxlength=\"5\" size=\"5\"
onmouseover=\"document.getElementById('naptex').innerHTML='Sem zapiš tažnou sílu lokomotivy, je-li známa.';\"
/> kN\n";
echo "</td></tr>\n";
// electrification checkboxes — each system contributes a power-of-two
// weight; the save handler sums them into the elektro bitmask
echo "<tr><td>Potřeba elektrifikace:</td><td>\n";
$systemy=array("3 kV =","25 kV 50 Hz", "Tram/metro (600..900 V =)","1,5 kV =","15 kV 16 2/3 Hz","ostatní");
$vaha=1;
for ($i=0;$i<count($systemy);$i++) {
echo "<input type=\"checkbox\" name=\"elektro[]\" value=\"$vaha\" id=\"elektro\" ".((($dat['elektro']&$vaha)>0)?"checked=\"checked\"":"")." /> {$systemy[$i]} <br>\n";
$vaha*=2;
}
echo "Pro vozy schopné provozu bez elektrifikace nezaškrtávejte nic!</td></tr>\n";
echo "<tr><td>Maximální rychlost, kterou hnací vozidlo vlastní silou vyvine:</td><td>\n";
echo "<input type=\"text\" name=\"max_rych_hnaci\" value=\"{$dat['max_rych_hnaci']}\" maxlength=\"3\" size=\"5\"
onmouseover=\"document.getElementById('naptex').innerHTML='Sem zapiš maximální rychlost, kterou lokomotiva vlastní silou dosáhne. Pokud jde o řídící vůz, zapiš sem maximální rychlost, kterou může souprava s řídícím vozem v čele jet.';\"
/> km/h\n";
echo "</td></tr>\n";
echo "</tbody>\n";
echo "<tr><td>Maximální rychlost:</td><td>\n";
echo "<input type=\"text\" name=\"max_rych\" value=\"{$dat['max_rych']}\" maxlength=\"3\" size=\"5\"
onmouseover=\"document.getElementById('naptex').innerHTML='Sem zapiš maximální rychlost, kterou může vůz jet. Pokud je vůz hnací, zapiš sem rychlost, kterou může maximálně jet, i když by měl být sám tažen (např. pomalá podbíječka, jedoucí sama max. 30 km/h, tažená na konci Mn vlaku může jet 80 km/h - tuto hodnotu sem napiš).';\"
/> km/h\n";
echo "</td></tr>\n";
// seating capacity
echo "<tr><td>Obsaditelnost:</td><td>\n";
echo "<input type=\"text\" name=\"osob\" value=\"{$dat['osob']}\" maxlength=\"5\" size=\"5\"
onmouseover=\"document.getElementById('naptex').innerHTML='Sem zapiš počet osob, je-li vůz osobní - jinak nulu.';\"
/> osob\n";
echo "</td></tr>\n";
// cargo type and amount
echo "<tr><td>Typ nákladu:</td><td>\n";
echo "<input id=\"tyna\" type=\"text\" name=\"typ_nakladu\" value=\"{$dat['typ_nakladu']}\"
onmouseover=\"document.getElementById('naptex').innerHTML='Sem zapiš typ nákladu, je-li vůz nákladní. Pokus se využít již užité typy nákladů, které jsou vypsány v poli vpravo.';\"
maxlength=\"100\" size=\"30\" />\n";
echo "<< <select id=\"naptn\" onchange=\"document.getElementById('tyna').value=document.getElementById('naptn').value\"
onmouseover=\"document.getElementById('naptex').innerHTML='V tomto seznamu můžeš vybrat předdefinovaný typ nákladu. Pokud potřebuješ vlastní typ, napiš ho do pole vlevo - systém automaticky založí nový typ.';\"
>";
$res2=mysqli_query($sql_id,"SELECT typ_nakladu FROM vozy GROUP BY typ_nakladu ORDER BY typ_nakladu");
while ($dat2=mysqli_fetch_array($res2)) {echo "<option value=\"{$dat2['typ_nakladu']}\">{$dat2['typ_nakladu']}</option>\n";};
echo "</select>\n";
echo "<input type=\"button\" value=\"Není nákladní\" onclick=\"document.getElementById('tyna').value='';document.getElementById('mnna').value='0';\"
onmouseover=\"document.getElementById('naptex').innerHTML='Stisk tlačítka vynuluje pole Množství nákladu a nastaví nulový typ nákladu';\"
/>";
echo "</td></tr>\n";
echo "<tr><td>Množství nákladu:</td><td>\n";
echo "<input type=\"text\" id=\"mnna\" name=\"naklad\" value=\"{$dat['naklad']}\" maxlength=\"7\" size=\"5\" onmouseover=\"document.getElementById('naptex').innerHTML='Zadejte celým číslem množství jednotek (tun, kusů, m3,... ) nákladu plně obsazeného vozu.';\"/> jednotek (ks, tun, m<sup>3</sup>...)\n";
echo "</td></tr>\n";
// client-side validation: overfrm() rejects an empty nazev or trida
echo <<<EOT
<SCRIPT LANGUAGE="JavaScript"><!--
function overfrm(formular)
{
//alert("Test");
if ((formular.nazev.value)=="")
{
alert("Typ musíte vyplnit!");
formular.nazev.focus();
return false;
}/* else {
alert("Test");
} */
else if (formular.trida.value=="")
{
alert("Třídu musíte vyplnit!");
formular.trida.focus();
return false;
}
else {
return true;};
}
// -->
</script>
EOT;
echo "<tr><td>Potvrzení zadaných dat:</td><td>\n";
echo "<input type=\"submit\" name=\"\" value=\"Odeslat data\" onmouseover=\"document.getElementById('naptex').innerHTML='Stisknutím tlačítka odešlete data na server'\" /> \n";
echo "</td></tr>\n";
echo "<tr><td>Nápověda (zobraz najetím nad zadávací pole):</td><td>\n";
echo "</td></tr>\n";
// #naptex is the help row targeted by all the onmouseover handlers above
echo "<tr><td colspan=\"2\" id=\"naptex\" style=\"background-color:#FFFFEE\">Zvolením vstupního pole se zobrazí nápověda</td><td>\n";
echo "</table></form>\n";
}
/**
 * Render a yes/no confirmation page for a follow-up action.
 *
 * @param int    $id      vehicle id (0 = no specific vehicle)
 * @param string $popis   question shown to the user
 * @param string $budakce value posted as "akce" when the user answers ANO
 *
 * Note: the ANO form carries no id field; the "#" action re-posts to the
 * current URL, so the id presumably survives in the query string — confirm.
 */
function confirm($id,$popis,$budakce) {
global $sql_id;
// bugfix: force numeric id — it comes from the request and was previously
// interpolated into the SQL below unescaped
$id*=1;
if ($id==0) {
$dat=array();
echo "<h3>Dotaz</h3>";
} else {
$dat=mysqli_fetch_array(mysqli_query($sql_id,"SELECT * FROM vozy WHERE id=$id"));
echo "<h3>Dotaz - vůz {$dat['nazev']} (identifikátor vozu: $id)</h3>";
// bugfix: render the vehicle image only when a vehicle is known;
// previously this ran for $id==0 too, reading an undefined array index
echo "<p style=\"text-align:center\" width=\"100%\"><img src=\"getimg.php?typ=".strtr($dat['nazev'],' ','_')."\"></p>";
};
echo "<h3>$popis</h3>";
// confirmation buttons: ANO posts the follow-up action, NE reloads via GET
echo "<form action=\"#\" method=\"post\"><input type=\"hidden\" name=\"akce\" value=\"$budakce\" /><input type=\"submit\" value=\"ANO\" /></form>\n";
echo "<form action=\"#\" method=\"get\"><input type=\"submit\" value=\"NE\" /></form>\n";
}
/**
 * Emit a one-button GET form that navigates back to the vehicle list.
 */
function backbtn() {
$frm  = "<form action=\"#\" method=\"get\">";
$frm .= "<input type=\"submit\" value=\"Zpět na seznam\" />";
$frm .= "</form>\n";
echo $frm;
}
/**
 * Placeholder for rendering a vehicle-image section.
 * NOTE(review): $nazev is currently unused and the body only emits an
 * empty wrapper div — this looks unfinished; confirm before relying on it.
 */
function obrdef($nazev) {
echo "<div class=\"obrdef\"><h4>Obrázek vozu:</h4>";
echo "</div>";
}
layout_header("Definice vozů");
global $sql_id;
// Disabled login check, kept for reference:
/*
if ($akce&&($akce!='obrgal')&&!$_COOKIE['MANTIS_STRING_COOKIE']) {
echo "<p style=\"background-color:red;color:white;font-weight:bold\">:: NEJSTE PŘIHLÁŠEN - PŘIHLASTE SE <a href=\"http://stanicar.ic.cz/mantis\">ZDE</a>!</p>";
echo "<p>Požadovaná akce byla zrušena. Po přihlášení budete moci editovat vozy.</p>";
$akce='';
}
*/
// Dispatch on the $akce request variable:
//   (empty) -> vehicle table, link to the current vozy.ini
//   uprav   -> edit form for vehicle $id (0 = add a new vehicle)
//   del     -> confirmation page; del2 -> actually delete
//   save    -> store the submitted form (handled further below)
// Editing rights were meant to depend on _SESSION["rights"]>0 — see the
// disabled block above.
$_SESSION['userid']='puvodni';
$res=(mysqli_fetch_array(mysqli_query($sql_id,"SELECT COUNT(*) AS pocet FROM vozy")));
if (!$akce) {
echo "<p>Celkem {$res['pocet']} vozů v databázi.</p>";
};
if ($akce=='uprav') {
vozyfrm($id);
}
if ($akce=='del') {
confirm($id,"Přejete si smazat tento vůz?","del2");
}
if ($akce=='del2') {
$id*=1;
// id is coerced to a number above, so the DELETE below is injection-safe
mysqli_query($sql_id,"DELETE FROM vozy WHERE id=$id;");
echo "<h3>Provedeno.</h3>";
backbtn();
}
// --- akce=save: persist the submitted vehicle form -------------------------
// Inserts a new row when $id is 0/empty, otherwise updates row $id.
// Form fields arrive as globals (register_globals emulation).
if ($akce=='save') {
// fold the elektro[] checkbox weights into a single bitmask
if (!isset($elektro)) $elektro=array();
$el=0;
for ($i=0;$i<count($elektro);$i++) {
$el+=$elektro[$i];
}
// coerce every numeric field — the values are untrusted client input
// (bugfix: id/hmotnost/vykon/sila/delka were interpolated into SQL raw)
$id*=1;
$hnaci*=1;
$delka=(float)strtr($delka,',','.');
$hmotnost*=1;
$vykon*=1;
$sila*=1;
$naklad*=1;
$max_rych*=1;
$max_rych_hnaci*=1;
$osob*=1;
// escape the free-text fields (bugfix: SQL injection)
$nazev=mysqli_real_escape_string($sql_id,$nazev);
$popis=mysqli_real_escape_string($sql_id,$popis);
$vyrobce=mysqli_real_escape_string($sql_id,$vyrobce);
$trida=mysqli_real_escape_string($sql_id,$trida);
$typ_nakladu=mysqli_real_escape_string($sql_id,$typ_nakladu);
if ($id) {
$dotaz="UPDATE vozy SET nazev=\"$nazev\", popis=\"$popis\", vyrobce=\"$vyrobce\", hnaci=$hnaci, trida=\"$trida\", hmotnost=$hmotnost, delka=$delka,
vykon=$vykon, sila=$sila, max_rych=$max_rych, max_rych_hnaci=$max_rych_hnaci, elektro=$el, osob=$osob, typ_nakladu=\"$typ_nakladu\", naklad=$naklad WHERE id=$id;";
} else {
$dotaz="INSERT INTO vozy SET nazev=\"$nazev\", popis=\"$popis\", vyrobce=\"$vyrobce\", hnaci=$hnaci, trida=\"$trida\", hmotnost=$hmotnost, delka=$delka,
vykon=$vykon, sila=$sila, max_rych=$max_rych, max_rych_hnaci=$max_rych_hnaci, elektro=$el, osob=$osob, typ_nakladu=\"$typ_nakladu\", naklad=$naklad;";
}
@mysqli_query($sql_id,$dotaz);
if (!$id) {
// bugfix: procedural mysqli_insert_id()/mysqli_error() require the
// connection link as their argument; calling them without it fails
$id=mysqli_insert_id($sql_id);
}
echo "<h3>Zápis dat do databáze</h3>";
if ($err=mysqli_error($sql_id)) {
echo "<p>Chyba: $err</p>\n<p>Stiskněte tlačítko Zpět v prohlížeči a ověřte, zda jsou všechny hodnoty správně zadané.</p>";
} else {
echo "<p>Provedeno.</p>";
echo "<a href=\"index.php?akce=uprav&id=$id\">Zpět na formulář úprav</a> ";
echo "<a href=\"index.php?akce=obredit&id=$id\">Obrázky vozu</a> ";
}
}
/**
 * Build "a"(scending)/"d"(escending) sort links for a vehicle-table column.
 *
 * @param string $parametr column name passed as the "so" query parameter
 * @return string two HTML links preserving the current action
 */
function SortOd($parametr) {
// bugfix: $akce was read without a global declaration, so it was always
// undefined inside the function and the links carried an empty akce=
global $akce;
return " <a href=\"index.php?akce=$akce&so=$parametr\">a</a> <a href=\"index.php?akce=$akce&so=$parametr&desc=1\">d</a> ";
}
if (!$akce||$akce=='obrgal') {
// Vehicle table listing; with akce=='obrgal' every row is followed by its images.
echo "<h3>Tabulka vozů</h3>\n";
// Pack (add-on set) filter select box; packs are the part of nazev after '_'.
echo "<form action=\"index.php\" method=\"GET\">Zobrazit vozidla: <select name=\"pack\">\n";
echo "<option value=\"\" ";
echo (!$pack)?"selected=\"1\"":"";
echo ">Všechna vozidla</option>\n";
echo "<option value=\"NOPACK\" ";
echo ($pack=='NOPACK')?"selected=\"1\"":"";
echo ">ČD/ŽSR/ČSD</option>\n";
// Query the list of all distinct packs.
$dat=mysqli_query($sql_id,"SELECT SUBSTRING( nazev, INSTR( nazev, '_' ) +1 ) AS pack FROM `vozy` WHERE INSTR( nazev, '_' ) GROUP BY pack");
while ($res=mysqli_fetch_array($dat)) {
echo "<option value=\"{$res['pack']}\" ";
echo ($pack==$res['pack'])?"selected=\"1\"":"";
echo ">{$res['pack']}</option>\n";
}
echo "</select><input type=\"submit\" value=\"OK\"><input type=\"hidden\" name=\"so\" value=\"$so\"><input type=\"hidden\" name=\"desc\" value=\"$desc\"><input type=\"hidden\" name=\"akce\" value=\"$akce\"></form>";
echo "<table>\n";
echo "<tr><th>Název".SortOd('nazev')."</th><th>Třída ".SortOd('trida')."</th><th>Délka [m]</th><th>Hmotnost [t]</th><th>Výkon, rychlosti</th><th>Náklad</th><th>Akce</th></tr>\n";
// Build the SELECT. NOTE(review): $pack and $so come from the request and are
// interpolated into the SQL unescaped - consider whitelisting/escaping them.
$dotaz="SELECT * FROM vozy ";
if ($pack) {
if ($pack=="NOPACK") {
$dotaz.="WHERE NOT INSTR(nazev,'_')" ;
} else {
$dotaz.="WHERE nazev LIKE '%_$pack' " ;
};
}
if (!$so) {
$dotaz.="ORDER BY hnaci DESC, nazev;";
} else {
$dotaz.="ORDER BY hnaci DESC, $so";
if ($desc) {$dotaz.=' DESC';};
}
$res=mysqli_query($sql_id,$dotaz);
while ($dat=mysqli_fetch_array($res)) {
echo "<tr onMouseOver=\"this.bgColor='#F0F0FF';\" onMouseOut=\"this.bgColor='#FFFFFF';\">\n";
// name
echo "<td><b>{$dat['nazev']}</b></td>\n";
// class (hnaci flag = powered vehicle)
echo "<td>";
if ($dat['hnaci']) {
echo "Hnací: ";
} else {
echo "Vůz: ";
};
echo "{$dat['trida']}</td>\n";
// length
echo "<td>{$dat['delka']}</td>\n";
// weight
echo "<td>{$dat['hmotnost']}</td>\n";
// power and speeds (tractive force only when positive)
echo "<td>";
if ($dat['hnaci']) {
echo "P={$dat['vykon']} kW, vmax={$dat['max_rych_hnaci']} km/h";
if ($dat['sila']>0) {
echo " F={$dat['sila']} kN";
}
} else {
echo "vmax={$dat['max_rych']} km/h";
}
echo "</td>\n";
// cargo / passenger capacity
echo "<td>";
if ($dat['osob']) {echo "Osoby: {$dat['osob']} ";};
if ($dat['naklad']) {echo "{$dat['typ_nakladu']}: {$dat['naklad']}";};
echo "</td>\n";
// per-row actions
echo "<td>";
echo "<a href=\"?akce=uprav&id={$dat['id']}\">detail/upravit</a>";
echo " <a href=\"?akce=del&id={$dat['id']}\">smazat</a>";
echo "</td>\n";
echo "</tr>\n";
if ($akce=='obrgal') {
echo "<tr><td colspan=\"7\">\n";
// Image detection: img/v_<id>.png plus optional img/v_<id>_<n>.png alternatives.
if (File_exists("img/v_{$dat['id']}.png")) {
echo "<img src=\"img/v_{$dat['id']}.png\" />";
$oi=1;
while (File_exists("img/v_{$dat['id']}_$oi.png")) {
echo "<br>\n";
echo "<img src=\"img/v_{$dat['id']}_$oi.png\" /> (alternativní obrázek $oi)";
$oi++;
}
} else {
echo "Vůz nemá definován žádný obrázek";
}
echo "</td></tr>\n";
}
}
echo "</table>\n";
}
if ($akce=='addvini') {
	// Static form: paste a fragment of vozy.ini; akce=autoadd parses it.
	echo '<h3>Přidání vozů z vozy.ini</h3>';
	echo '<p>Vložte část Vašeho souboru vozy.ini, kde jsou nově definované vozy, a potvrďte tlačítkem Zpracovat.</p>';
	echo '<form method="post"><input type="hidden" name="akce" value="autoadd"><textarea cols="150" rows="20" name="data"></textarea><br /><input value="Zpracovat" type="submit"></form>';
}
if ($akce=='verze') {
// Show the current STAG version number and a form to publish a new release.
$dat=mysqli_fetch_assoc(mysqli_query($sql_id,'SELECT * FROM verze;'));
$verze=$dat['cisloverze'];
echo "<h3>Aktuální číslo verze pro STAG je $verze</h3>";
echo "<h3>Vydání nové verze:</h3>";
// NOTE(review): the form has no method attribute, so the changelog ($zmeny)
// is submitted via GET to akce=newver.
echo "<form>Popis: <textarea name=\"zmeny\" cols=\"60\" rows=\"5\">Sem napiš popis změn</textarea>\n";
echo "<input type=\"hidden\" name=\"akce\" value=\"newver\"><input type=\"submit\">";
echo "</form>\n";
};
if ($akce=='newver') {
	// Bump the STAG version counter and regenerate ./getversion.php, the plain
	// text file clients fetch: first line = version, then "Verze N:" + changelog.
	mysqli_query($sql_id,'UPDATE verze SET cisloverze=cisloverze+1;') ;
	$dat=mysqli_fetch_assoc(mysqli_query($sql_id,'SELECT * FROM verze;'));
	$verze=$dat['cisloverze'];
	echo "Vytvarim popisny soubor...<br />";
	// Robustness fix: only unlink when the file exists (avoids a PHP warning
	// on a fresh install)...
	if (file_exists("./getversion.php")) {
		unlink("./getversion.php");
	}
	// ...and check that fopen() succeeded before writing into it.
	$fp=fopen("./getversion.php","wb");
	if ($fp) {
		fwrite($fp,"$verze\n");
		fwrite($fp,"Verze $verze:\n");
		fwrite($fp,"$zmeny");
		fclose($fp);
		chmod ("./getversion.php", 0666);
		echo "<h3>Byla vydána verze $verze.</h3>";
	} else {
		echo "<h3>Chyba: soubor getversion.php se nepodařilo otevřít pro zápis.</h3>";
	}
}
function ToDB() {
	// Insert the currently parsed vehicle (globals $vuz and $polozky, filled by
	// akce=autoadd) into the vozy table; prints "OK" or the MySQL error.
	// Bug fix: $sql_id added to the globals and passed to mysqli_query() /
	// mysqli_error() - the procedural mysqli API requires the link argument
	// (the original call had no link at all and could never execute).
	global $vuz,$polozky,$sql_id;
	if ($vuz&&$polozky['typ']&&$polozky['podtrida']) {
		// Accept decimal comma in the length field.
		$polozky['delka']=strtr($polozky['delka'],",",".");
		mysqli_query
		($sql_id,"INSERT INTO vozy SET hnaci=".($polozky['typ']=='hnaci'?'1':'0').", trida=\"{$polozky['podtrida']}\", nazev=\"$vuz\",
popis=\"{$polozky['popis']}\",
vykon={$polozky['vykon']},hmotnost={$polozky['hmotnost']},delka={$polozky['delka']},
osob={$polozky['osob']},typ_nakladu='{$polozky['typ_nakladu']}',max_rych={$polozky['max_rych']},
max_rych_hnaci={$polozky['max_rych_hnaci']},elektro={$polozky['elektro']},vyrobce=\"{$polozky['vyrobce']}\",
sila={$polozky['sila']},autor=\"{$_SESSION['userid']}\",naklad={$polozky['naklad']}");
		if ($err=mysqli_error($sql_id)) {
			echo "CHYBA: $err<br />";
		} else {echo "OK<br />";};
	}
};
if ($akce=='autoadd') {
	// Import vehicles into the DB from a pasted vozy.ini fragment ($data).
	echo "<h3>Zpracování INI souboru...</h3>\n";
	$radky=explode("\n",$data);
	// Bug fix: the original wrote $radky[i] (undefined constant "i"), so the
	// lines were never actually trimmed and key 'i' was clobbered instead.
	for ($i=0;$i<count($radky);$i++) {$radky[$i]=trim($radky[$i]);};
	echo "Načteno $i řádků, hledám vozy...<br />";
	$vuz='';
	for ($i=0;$i<count($radky);$i++) {
		// A line starting with "[" opens a new vehicle section.
		// NOTE(review): ereg*/eregi were removed in PHP 7 - migrate to preg_*.
		if (eregi("^\[",$radky[$i])) {
			// Flush the previous vehicle (ToDB checks the mandatory fields).
			ToDB();
			$vuz=trim(ereg_replace("\[", "", ereg_replace("\]", "", $radky[$i])));
			// Defaults for every recognised key; overridden by key=value lines.
			$polozky=array('vykon'=>0,'hmotnost'=>100,'delka'=>10.0,'osob'=>0,'max_rych'=>80,
			'max_rych_hnaci'=>80,'elektro'=>0,'sila'=>-1,'naklad'=>0);
			echo "$vuz: ";
			if (strtolower($vuz)=='default') {
				echo "(ignoruji default)<br />";
				$vuz='';
			} else {
				// Skip vehicles that already exist in the database.
				$res=mysqli_fetch_array(mysqli_query($sql_id,"SELECT COUNT(*) as pocet FROM vozy WHERE nazev=\"$vuz\";"));
				if ($res['pocet']) {
					echo " vuz jiz je v databazi <br />";
					$vuz='';
				} else {
					echo " vuz neni v databazi, zpracovavam:<br />";
				};
			}
		} else
		if ($vuz&&trim($radky[$i])!='') {
			// Inside a section: parse "key=value" into $polozky.
			$dataradku=explode('=',$radky[$i]);
			$polozky[trim(strtolower($dataradku[0]))]=trim($dataradku[1]);
			echo "{$dataradku[0]} = {$dataradku[1]}<br>";
		}
	};
	// Flush the last vehicle in the paste.
	ToDB();
}
if ($akce=='obredit') {
// Image management form for the vehicle (obrfrm is defined elsewhere in the project).
obrfrm($id,$oa,$altid);
};
// Footer: report whether the user is logged into Mantis (detected via its cookie).
echo "<p>Stav přihlášení: ";
if ($_COOKIE['MANTIS_STRING_COOKIE']) {
echo "přihlášen do Mantisu";
} else {
echo "nepřihlášen - přihlaste se <a href=\"http://stanicar.ic.cz/mantis\">zde</a>!";
}
layout_end();
?>
<file_sep>/README.md
# stag-web
STAG web application – an old web application (MySQL + PHP) that holds the vehicle database for STAG, the timetable editor for Stanicar.
<file_sep>/php/obecne.php
<?
// include("{$root}include/session.php");
/*
LIBRARY OF BASIC PAGE FUNCTIONS
<NAME> (c) 2004
*/
ini_set('default_charset', 'windows-1250');
# Reject requests coming from blocked IP addresses.
if ($_SERVER['REMOTE_ADDR']=="192.168.3.11") {
die($_SERVER['REMOTE_ADDR'].": Nepovoleny pristup, kontaktujte flint at vlaksim.com");
}
// Emulation of the disabled register_globals: copy every GET/POST/SERVER
// entry into a same-named global variable.
// SECURITY NOTE(review): this lets any client overwrite arbitrary globals
// via request parameters - the rest of the site depends on it, but it should
// be replaced with explicit reads from the superglobals.
foreach ($_GET as $key => $value) {
// debug output of each key/value (disabled)
$$key=$value;
}
foreach ($_POST as $key => $value) {
// debug output of each key/value (disabled)
$$key=$value;
}
foreach ($_SERVER as $key => $value) {
// debug output of each key/value (disabled)
$$key=$value;
}
// Site configuration.
$pata_text="© stanicar team 2003-2005";
$cesta="http://stanicar.ic.cz";
$jmeno_webu="Staničář";
$jmeno_webu_2p="Staničáře";
$zkratka_webu="Staničář";
$titul_menu="";
$nadpis_titulky="Staničář";
session_start();
// The admin-login marker doubles as an expiry timestamp; clear it once stale.
if ($_SESSION["loggedadmin"]<time()) {$_SESSION["loggedadmin"]=0;};
// ?newdesign=1 / ?olddesign=1 toggle the design flag stored in the session.
if ($newdesign) {
$_SESSION["newdesign"]=1;
}
if ($olddesign) {
$_SESSION["newdesign"]=0;
}
/*
Layout_header($title) - builds the start of the page, including the heading,
and sends the cache-control headers first.
*/
function Headers()
{
	// Emit HTTP headers that disable client and proxy caching of the page.
	$noCache = array(
		'Pragma: no-cache',
		'Cache-Control: no-cache',
		'Expires: '.GMDate("D, d M Y H:i:s").' GMT',
	);
	foreach ($noCache as $hdr) {
		header($hdr);
	}
}
function Layout_header($title,$sekce="STAG") {
// Emits the page opening: HTML head, top bar, left navigation menu and the
// login box. $title is printed as the <h1>; $sekce appears in <title> and in
// the coloured top bar.
global $zkratka_webu,$root;
Headers();
echo <<<EOT
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN">
<html>
<head>
<title>Staničář: $sekce / $title</title>
<meta content="text/html; charset=windows-1250" http-equiv="Content-Type">
<link href="{$root}newstyl.css" rel="stylesheet" type="text/css">
<link rel="SHORTCUT ICON" href="favicon.ico" type="image/x-icon"/>
</HEAD>
<BODY>
<div style="background-color:rgb(73,73,128);text-align:center;height:34px;overflow:hidden;color:white;padding:2px 10px;font-size:28px;vertical-align:center" width="100%">
$sekce
</div>
EOT;
// Left navigation menu.
echo <<<EOT
</div>
<div id="levanavigace" style="top:74px">
<a href="{$root}index.php">Index</a><br /><br />
<b>MANTIS</b><br />
je KO<br /><br />
<b>STAG</b><br />
<a href="{$root}stag/files" target="_blank">download</a><br /><br />
<b>Definice vozů</b><br />
<a href="{$root}vini/index.php">Zobrazit</a><br />
<a href="{$root}vini/index.php?akce=obrgal">Galerie obrázků</a><br />
<a href="{$root}vini/index.php?akce=uprav&id=0">Vložit vůz</a><br />
<a href="{$root}vini/getvini.php" target=\"_blank\">Aktuální <b>vozy.ini</b></a><br />
<a href="{$root}vini/index.php?akce=verze">Akt. číslo verze, vydání nové</a><br />
EOT;
// The INI import link is shown only to users logged into Mantis.
if ($_COOKIE['MANTIS_STRING_COOKIE']) {
echo <<<EOT
<a href="{$root}vini/index.php?akce=addvini">Import z INI</a><br />
EOT;
}
echo <<<EOT
<br />
</div>
EOT;
echo "<div class=\"telotextu\">\n";
echo "<div class=\"telotextu2\">\n\n";
echo '<div class="login">';
// NOTE(review): $session and $form are used below without a global
// declaration, so inside this function they appear undefined unless the
// register_globals emulation populated them - confirm before relying on
// the logged-in branch.
if($session->logged_in){
echo "Vítej <b>$session->username</b>, jsi přihlášen. <br><br>"
."[<a href=\"{$root}userinfo.php?user=$session->username\">Účet</a>] "
."[<a href=\"{$root}useredit.php\">Změna údajů</a>] ";
if($session->isAdmin()){
echo "[<a href=\"{$root}admin/admin.php\">Administrace</a>] ";
}
echo "[<a href=\"{$root}process.php\">Logout</a>]";
} else {
/**
* User not logged in, display the login form.
* If user has already tried to login, but errors were
* found, display the total number of errors.
* If errors occurred, they will be displayed.
*/
if($form->num_errors > 0){
echo "<font size=\"2\" color=\"#ff0000\">".$form->num_errors." error(s) found</font>";
}
?>
<form action="<? echo $root;?>process.php" method="POST">
Jméno:<input type="text" name="user" maxlength="20" value="">
Heslo:<input type="<PASSWORD>" name="pass" maxlength="20" value="">
<td colspan="2" align="left"><input type="checkbox" name="remember"
>
Pamatovat přihlášení
<input type="hidden" name="sublogin" value="1">
<input type="submit" value="Login">
[<a href="<? echo $root;?>forgotpass.php">Poslat heslo</a>]
[<a href="<? echo $root;?>register.php">Registrace</a>]
</form>
<?
}
echo '</div>';
echo "<h1>$title</h1>\n\n";
echo "</div>";
}
/*
Layout_end($pata=''): ends the page and prints the footer; when $pata is
empty, the default $pata_text is used as the footer text.
*/
function Layout_end($pata='') {
	// Close the content wrappers, print the footer bar and end the document.
	global $pata_text;
	// Fall back to the configured footer text when none was supplied.
	$footer = (!$pata) ? $pata_text : $pata;
	echo "</div>\n"
		."</div>\n"
		."<div class=\"patka\">\n"
		."<div class=\"patkatext\">\n"
		."$footer\n"
		."</div>\n"
		."</div>\n"
		."</body></html>";
}
?>
<file_sep>/export/vozy/vozy.ini
;vozy.ini PHP generator
;(c) bbf 2005
;Datum vytvoreni: 30. Oct 2018 01:02:50
[default]
_verze=404
[Ua]
typ=vuz
podtrida=Nákladní vozy speciální
popis=Vuz na sypke hmoty speciální
hmotnost=27
delka=10
typ_nakladu=sypké hmoty
naklad=52
max_rych=50
elektro=0
vyrobce=
autor=puvodni
img=1
[Flls S]
typ=vuz
podtrida=Nákladní vozy speciální
popis=Vuz na sypke hmoty
hmotnost=22
delka=16
typ_nakladu=štěrk
naklad=58
max_rych=65
elektro=0
vyrobce=
autor=puvodni
img=-1
[Flls]
typ=vuz
podtrida=Nákladní vozy speciální
popis=Vuz na sypke hmoty
hmotnost=22
delka=16
typ_nakladu=sypké hmoty
naklad=58
max_rych=65
elektro=0
vyrobce=
autor=puvodni
img=3
imgex=1
[Zags]
typ=vuz
podtrida=Nákladní vozy cisternové
popis=Vuz na tekuté hmoty
hmotnost=22
delka=13
typ_nakladu=tekuté hmoty
naklad=48
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=8
imgex=1
[Zaes s]
typ=vuz
podtrida=Nákladní vozy cisternové
popis=Vuz na tekuté hmoty
hmotnost=26
delka=12
typ_nakladu=tekuté hmoty
naklad=54
max_rych=120
elektro=0
vyrobce=
autor=puvodni
img=9
[Zaes zl]
typ=vuz
podtrida=Nákladní vozy cisternové
popis=Vuz na tekuté hmoty
hmotnost=26
delka=12
typ_nakladu=tekuté hmoty
naklad=54
max_rych=120
elektro=0
vyrobce=
autor=puvodni
img=10
[Zacens]
typ=vuz
podtrida=Nákladní vozy cisternové
popis=Vuz na tekuté hmoty
hmotnost=24
delka=15
typ_nakladu=tekutá síra
naklad=66
max_rych=120
elektro=0
vyrobce=
autor=puvodni
img=11
imgex=1
[Ups]
typ=vuz
podtrida=Nákladní vozy cisternové
popis=Vuz na sypké hmoty
hmotnost=14
delka=12
typ_nakladu=sypké hmoty
naklad=27
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=12
[Ugps]
typ=vuz
podtrida=Nákladní vozy cisternové
popis=Vuz na sypké hmoty
hmotnost=17
delka=14
typ_nakladu=sypké hmoty
naklad=28
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=13
[Uasc c]
typ=vuz
podtrida=Nákladní vozy cisternové
popis=Vuz na sypké hmoty
hmotnost=23
delka=14
typ_nakladu=sypké hmoty
naklad=57
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=14
[Uasc m]
typ=vuz
podtrida=Nákladní vozy cisternové
popis=Vuz na sypké hmoty
hmotnost=23
delka=14
typ_nakladu=sypké hmoty
naklad=57
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=15
[Uapgs]
typ=vuz
podtrida=Nákladní vozy cisternové
popis=Vuz na tekuté hmoty
hmotnost=24
delka=15
typ_nakladu=tekuté hmoty
naklad=48
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=16
[Uacns c]
typ=vuz
podtrida=Nákladní vozy cisternové
popis=Vuz na sypke hmoty
hmotnost=19
delka=18
typ_nakladu=sypké hmoty
naklad=72
max_rych=120
elektro=0
vyrobce=
autor=puvodni
img=18
[DEC m]
typ=vuz
podtrida=Nákladní vozy cisternové
popis=Vuz na tekute hmoty
hmotnost=24
delka=13
typ_nakladu=tekuté hmoty
naklad=56
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=19
[DEC c]
typ=vuz
podtrida=Nákladní vozy cisternové
popis=Vuz na tekute hmoty
hmotnost=24
delka=13
typ_nakladu=tekuté hmoty
naklad=56
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=20
[DEC zl]
typ=vuz
podtrida=Nákladní vozy cisternové
popis=Vuz na tekute hmoty
hmotnost=24
delka=13
typ_nakladu=tekuté hmoty
naklad=56
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=21
[DEC b]
typ=vuz
podtrida=Nákladní vozy cisternové
popis=Vuz na tekute hmoty
hmotnost=24
delka=13
typ_nakladu=tekuté hmoty
naklad=56
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=22
[Rmms Ka]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Vuz na kusové zboží
hmotnost=28
delka=18
typ_nakladu=kabely
naklad=52
max_rych=120
elektro=0
vyrobce=
autor=puvodni
img=23
[Rmms]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Vuz na kusové zboží
hmotnost=28
delka=18
typ_nakladu=kusové zboží
naklad=52
max_rych=120
elektro=0
vyrobce=
autor=puvodni
img=24
imgex=1
[Ress T]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Vuz na kusové zboží
hmotnost=25
delka=20
typ_nakladu=trubky
naklad=55
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=25
[Ress Ka]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Vuz na kusové zboží
hmotnost=25
delka=20
typ_nakladu=kabely
naklad=55
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=26
[Ress]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Vuz na kusové zboží
hmotnost=25
delka=20
typ_nakladu=kusové zboží
naklad=55
max_rych=120
elektro=0
vyrobce=
autor=puvodni
img=27
imgex=8
[Paop A]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Vuz na přepravu automobilů
hmotnost=28
delka=16
typ_nakladu=automobily
naklad=52
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=28
imgex=1
[Paop]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Vuz na přepravu automobilů
hmotnost=28
delka=16
typ_nakladu=automobily
naklad=52
max_rych=120
elektro=0
vyrobce=
autor=puvodni
img=29
[Paon]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Vuz na kusové a sypké zboží
hmotnost=23
delka=15
typ_nakladu=kusové zboží
naklad=57
max_rych=80
elektro=0
vyrobce=
autor=puvodni
img=31
[Pao Pl]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Vuz na kusové zboží
hmotnost=26
delka=15
typ_nakladu=plech
naklad=54
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=32
[Pao Kov]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Vuz na kusové zboží
hmotnost=26
delka=15
typ_nakladu=opracované ingoty
naklad=54
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=33
[Pao Ko]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Vuz na kusové zboží
hmotnost=26
delka=15
typ_nakladu=kontejnery
naklad=54
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=34
imgex=1
[Pao Ka]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Vuz na kusové zboží
hmotnost=26
delka=15
typ_nakladu=kabely
naklad=54
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=35
[Pao D]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Vuz na kusové zboží
hmotnost=26
delka=15
typ_nakladu=dřevo
naklad=54
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=36
[Pao]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Vuz na kusové zboží
hmotnost=26
delka=15
typ_nakladu=kusové zboží
naklad=54
max_rych=120
elektro=0
vyrobce=
autor=puvodni
img=37
imgex=2
[Npp]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Vuz na přepravu automobilů
hmotnost=17
delka=15
typ_nakladu=automobily
naklad=18
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=39
imgex=1
[Lpsc A]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Dvojitý vuz na přepravu
hmotnost=27
delka=26
typ_nakladu=automobily
naklad=25
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=40
imgex=1
[Lpsc]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Dvojitý vuz na přepravu
hmotnost=27
delka=26
typ_nakladu=automobily
naklad=25
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=41
[Ks D]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Vuz na kusové zboží
hmotnost=14
delka=13
typ_nakladu=dřevo
naklad=30
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=42
imgex=3
[Ks]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=Vuz na kusové zboží
hmotnost=14
delka=13
typ_nakladu=kusové zboží
naklad=30
max_rych=120
elektro=0
vyrobce=
autor=puvodni
img=43
[Tbis]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=15
delka=15
typ_nakladu=kusové zboží
naklad=26
max_rych=120
elektro=0
vyrobce=
autor=puvodni
img=47
[Rils b]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=25
delka=19,9
typ_nakladu=kusové zboží
naklad=55
max_rych=120
elektro=0
vyrobce=
autor=puvodni
img=54
[Rils m]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=25
delka=19,9
typ_nakladu=kusové zboží
naklad=55
max_rych=120
elektro=0
vyrobce=
autor=puvodni
img=55
[Rils c]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=25
delka=18
typ_nakladu=kusové zboží
naklad=55
max_rych=120
elektro=0
vyrobce=
autor=puvodni
img=56
[Rils s]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=25
delka=19,9
typ_nakladu=kusové zboží
naklad=55
max_rych=120
elektro=0
vyrobce=
autor=puvodni
img=57
[La]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Chladirenský vuz na kusové
hmotnost=34
delka=16
typ_nakladu=pouze potraviny
naklad=37
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=62
imgex=1
[L]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=12
delka=12
typ_nakladu=pouze potraviny
naklad=28
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=63
[Iacqr]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Chladirenský vuz na kusové
hmotnost=30
delka=16
typ_nakladu=kusové zboží
naklad=30
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=-1
[Haikks]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=28
delka=19
typ_nakladu=kusové zboží
naklad=56
max_rych=120
elektro=0
vyrobce=
autor=puvodni
img=68
[Hbbillns]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=17
delka=16
typ_nakladu=kusové zboží
naklad=27
max_rych=120
elektro=0
vyrobce=
autor=puvodni
img=69
imgex=1
[Habbillns]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=30
delka=24
typ_nakladu=kusové zboží
naklad=60
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=70
imgex=5
[Habbins]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=29
delka=18
typ_nakladu=kusové zboží
naklad=61
max_rych=120
elektro=0
vyrobce=
autor=puvodni
img=71
imgex=1
[Gogs]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží (obilí)
hmotnost=25
delka=16
typ_nakladu=převážně obilí
naklad=55
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=72
[Gbs]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=15
delka=16
typ_nakladu=kusové zboží
naklad=26
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=73
imgex=2
[Gbqs]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží (len)
hmotnost=15
delka=14
typ_nakladu=pouze len
naklad=25
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=-1
[Gbkkqss]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=16
delka=14
typ_nakladu=kusové zboží
naklad=20
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=75
imgex=1
[Gbkkqs]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=15
delka=14
typ_nakladu=kusové zboží
naklad=24
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=76
[Gags]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Vuz na kusové zboží
hmotnost=25
delka=16
typ_nakladu=kusové zboží
naklad=55
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=77
[Tdns]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty (hnojiva)
hmotnost=15
delka=9,7
typ_nakladu=sypké hmoty
naklad=30
max_rych=120
elektro=0
vyrobce=
autor=puvodni
img=78
imgex=1
[Tdgns]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty
hmotnost=15
delka=10
typ_nakladu=sypké hmoty
naklad=30
max_rych=120
elektro=0
vyrobce=
autor=puvodni
img=79
[Tcms]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty s
hmotnost=13
delka=10
typ_nakladu=sypké hmoty
naklad=20
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=80
imgex=2
[Tads zl]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty s
hmotnost=20
delka=17
typ_nakladu=sypké hmoty
naklad=60
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=81
imgex=1
[Tads s]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty s
hmotnost=20
delka=17
typ_nakladu=sypké hmoty
naklad=60
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=82
imgex=1
[Talls]
typ=vuz
podtrida=Nákladní vozy speciální
popis=Vuz na sypke hmoty s odklopnou stresni klapkou
hmotnost=29
delka=13
typ_nakladu=sypké hmoty
naklad=51
max_rych=120
elektro=0
vyrobce=
autor=puvodni
img=83
imgex=1
[Vtr S]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty
hmotnost=12
delka=12
typ_nakladu=šrot
naklad=30
max_rych=90
elektro=0
vyrobce=
autor=puvodni
img=84
[Vtr U]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty
hmotnost=12
delka=12
typ_nakladu=uhlí
naklad=30
max_rych=90
elektro=0
vyrobce=
autor=puvodni
img=85
[Vtr R]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty
hmotnost=12
delka=12
typ_nakladu=cukrová řepa
naklad=30
max_rych=90
elektro=0
vyrobce=
autor=puvodni
img=86
[Vtr D]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty
hmotnost=12
delka=12
typ_nakladu=dřevo
naklad=30
max_rych=90
elektro=0
vyrobce=
autor=puvodni
img=87
imgex=1
[Vtr]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty
hmotnost=12
delka=12
typ_nakladu=sypké hmoty
naklad=30
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=88
imgex=2
[Vsa S]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz vysokostěný na sypke
hmotnost=24
delka=16
typ_nakladu=šrot
naklad=55
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=89
[Vsa U]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz vysokostěný na sypke
hmotnost=24
delka=16
typ_nakladu=uhlí
naklad=55
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=90
[VSa]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz vysokostěný na sypke
hmotnost=24
delka=16
typ_nakladu=sypké hmoty
naklad=55
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=91
imgex=1
[Lhs]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty a kusové
hmotnost=21
delka=14
typ_nakladu=sypké hmoty
naklad=52
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=95
[Fcc P]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty
hmotnost=13
delka=12
typ_nakladu=písek
naklad=27
max_rych=80
elektro=0
vyrobce=
autor=puvodni
img=96
[Fcc]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty
hmotnost=12
delka=9,8
typ_nakladu=sypké hmoty
naklad=28
max_rych=80
elektro=0
vyrobce=
autor=puvodni
img=97
[Fals-z V]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty
hmotnost=24
delka=12,3
typ_nakladu=vápenec
naklad=57
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=98
[Fals-z U]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty
hmotnost=24
delka=12,3
typ_nakladu=uhlí
naklad=57
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=99
imgex=1
[Fals-z]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty
hmotnost=24
delka=12,3
typ_nakladu=sypké hmoty
naklad=57
max_rych=120
elektro=0
vyrobce=
autor=puvodni
img=100
imgex=1
[Falns V]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty
hmotnost=26
delka=15
typ_nakladu=vápenec
naklad=47
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=101
[Falns U]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty
hmotnost=26
delka=13,5
typ_nakladu=uhlí
naklad=47
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=102
imgex=1
[Falns]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty
hmotnost=27
delka=13,5
typ_nakladu=sypké hmoty
naklad=47
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=103
imgex=1
[Faccs V]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty
hmotnost=23
delka=12,7
typ_nakladu=vápenec
naklad=56
max_rych=90
elektro=0
vyrobce=
autor=puvodni
img=104
[Faccs S]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty
hmotnost=23
delka=12,7
typ_nakladu=štěrk
naklad=56
max_rych=90
elektro=0
vyrobce=
autor=puvodni
img=105
imgex=2
[Faccs P]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty
hmotnost=23
delka=12,7
typ_nakladu=písek
naklad=56
max_rych=90
elektro=0
vyrobce=
autor=puvodni
img=106
imgex=2
[Faccs]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vuz na sypke hmoty
hmotnost=23
delka=12,7
typ_nakladu=sypké hmoty
naklad=56
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=107
imgex=2
[Eanos]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vůz na sypké hmoty s plachtou
hmotnost=23
delka=14
typ_nakladu=sypké hmoty
naklad=56
max_rych=120
elektro=0
vyrobce=
autor=puvodni
img=108
imgex=8
[Eamos]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vůz na sypké hmoty s
hmotnost=24
delka=14
typ_nakladu=sypké hmoty
naklad=56
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=109
[Eas-u S]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vůz na sypké hmoty
hmotnost=24
delka=14
typ_nakladu=šrot
naklad=56
max_rych=90
elektro=0
vyrobce=
autor=puvodni
img=110
[Eas-u U]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vůz na sypké hmoty
hmotnost=24
delka=14
typ_nakladu=uhlí
naklad=56
max_rych=90
elektro=0
vyrobce=
autor=puvodni
img=111
imgex=1
[Eas-u]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vůz na sypké hmoty
hmotnost=24
delka=14
typ_nakladu=sypké hmoty
naklad=56
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=112
[Eal U]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vůz na sypké hmoty
hmotnost=22
delka=14
typ_nakladu=uhlí
naklad=58
max_rych=80
elektro=0
vyrobce=
autor=puvodni
img=113
[Eal]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=Vůz na sypké hmoty
hmotnost=22
delka=14
typ_nakladu=sypké hmoty
naklad=58
max_rych=80
elektro=0
vyrobce=
autor=puvodni
img=114
imgex=1
[4020_OBB]
typ=hnaci
podtrida=El. jednotky střídavé
popis=Elektrická jednotka ÖBB
hmotnost=127
delka=67,4
vykon=1200
sila=117
max_rych_hnaci=120
osob=184
max_rych=120
elektro=16
vyrobce=SGP, BES, BBC, ELIN, Siemens (1978-1987)
autor=
img=1101
[5147_OBB]
typ=hnaci
podtrida=Motorové jednotky
popis=Dvouvozová motorová jednotka ÖBB
hmotnost=89
delka=49
vykon=419
sila=135
max_rych_hnaci=120
osob=136
max_rych=120
elektro=0
vyrobce=<NAME> (1992-1994)
autor=
img=1100
[Brdu_PKP]
typ=vuz
podtrida=Osobní 2. tř.
popis=Oddílový vůz 2. třídy, úprava pro přepravu kol
hmotnost=48
delka=24,5
osob=72
max_rych=160
elektro=0
vyrobce=<NAME> (1969-1989)
autor=puvodni
img=123
[Bdu_PKP]
typ=vuz
podtrida=Osobní 2. tř.
popis=Oddílový vůz 2. třídy
hmotnost=46
delka=24,5
osob=80
max_rych=120
elektro=0
vyrobce=<NAME> (1969-1989)
autor=puvodni
img=125
imgex=1
[Adu_PKP]
typ=vuz
podtrida=Osobní 1. tř.
popis=Oddílový vůz 1. třídy
hmotnost=38
delka=24,5
osob=54
max_rych=160
elektro=0
vyrobce=<NAME> (1969-1990)
autor=puvodni
img=126
[Bd_PKP]
typ=vuz
podtrida=Osobní 2. tř.
popis=Oddílový vůz 2. třídy
hmotnost=38
delka=24,5
osob=80
max_rych=120
elektro=0
vyrobce=<NAME> (1969-1989)
autor=puvodni
img=128
imgex=1
[B_PKP]
typ=vuz
podtrida=Osobní 2. tř.
popis=Oddílový vůz 2. třídy
hmotnost=39
delka=24,5
osob=80
max_rych=160
elektro=0
vyrobce=<NAME> (1969-1989)
autor=puvodni
img=129
imgex=1
[A_PKP]
typ=vuz
podtrida=Osobní 1. tř.
popis=Oddílový vůz 1. třídy
hmotnost=38
delka=24,5
osob=54
max_rych=160
elektro=0
vyrobce=<NAME> (1969-1990)
autor=puvodni
img=130
imgex=1
[A_OBB]
typ=vuz
podtrida=Osobní 1. tř.
popis=Oddílový vůz 1. třídy
hmotnost=38
delka=24,5
osob=48
max_rych=140
elektro=0
vyrobce=
autor=puvodni
img=134
imgex=1
[BD_MAV]
typ=vuz
podtrida=Osobní vozy se služ.oddílem
popis=úprava z vozů Ds 95-80: služební oddíl upraven pro cestující, zbytek vozu uzpůsoben pro přepravu jízdních kol.
hmotnost=38
delka=24,5
osob=8
typ_nakladu=Kola
naklad=28
max_rych=140
elektro=0
vyrobce=Szolnok, rekonstrukce pro MÁV 1996 - 2010
autor=puvodni
img=136
[B_MAV]
typ=vuz
podtrida=Osobní 2. tř.
popis=Osobní vůz 2 třídy (1982)
hmotnost=39
delka=24,5
osob=80
max_rych=140
elektro=0
vyrobce=
autor=puvodni
img=137
[A_MAV]
typ=vuz
podtrida=Osobní 1. tř.
popis=Osobní vůz 1 třídy (1972)
hmotnost=39
delka=24,5
osob=54
max_rych=140
elektro=0
vyrobce=
autor=puvodni
img=138
[A_DB]
typ=vuz
podtrida=Osobní 1. tř.
popis=Oddílový vůz 1. třídy
hmotnost=38
delka=24,5
osob=54
max_rych=120
elektro=0
vyrobce=Waggonbau Bautzen (1973-1974)
autor=puvodni
img=142
[BRm ZSR]
typ=vuz
podtrida=Osobní vozy restaurační
popis=Osobní vůz 2 třídy (1975, reko
hmotnost=48
delka=26,4
osob=30
max_rych=140
elektro=0
vyrobce=
autor=puvodni
img=143
[B_ZSR]
typ=vuz
podtrida=Osobní 2. tř.
popis=Osobní vůz 2 třídy (1975)
hmotnost=39
delka=24,5
osob=80
max_rych=140
elektro=0
vyrobce=
autor=puvodni
img=145
imgex=1
[Bmee_ZSSK]
typ=vuz
podtrida=Osobní 2. tř.
popis=Oddílový vůz 2. třídy
hmotnost=48
delka=26,4
osob=66
max_rych=160
elektro=0
vyrobce=Waggonbau Bautzen (1985-1989)
autor=puvodni
img=146
imgex=2
[BD_ZSR]
typ=vuz
podtrida=Osobní 2. tř.
popis=Úprava z vozů Bc,jedna umývárna a část přilehlého oddílu upraveno na prostor pro přepravu jízdních kol.
hmotnost=38
delka=24,5
osob=58
typ_nakladu=Kola
naklad=2
max_rych=140
elektro=0
vyrobce=ŽOS Vrútky, 2004 - 2008 (modernizace vozů Bc - Waggonbau Bautzen UIC - Y)
autor=puvodni
img=147
[BDmee ZSR]
typ=vuz
podtrida=Osobní vozy se služ.oddílem
popis=Os. se služ odd. 2 třídy (1985,
hmotnost=37
delka=26,4
osob=40
max_rych=160
elektro=0
vyrobce=
autor=puvodni
img=148
imgex=1
[A_ZSR]
typ=vuz
podtrida=Osobní 1. tř.
popis=Osobní vůz 1 třídy (1972) interval 19-41
hmotnost=42
delka=24,5
osob=54
max_rych=140
elektro=0
vyrobce=
autor=puvodni
img=149
imgex=1
[Amee_ZSR]
typ=vuz
podtrida=Osobní 1. tř.
popis=Osobní vůz 1 třídy (1985)
hmotnost=40
delka=26,4
osob=66
max_rych=160
elektro=0
vyrobce=
autor=puvodni
img=150
imgex=3
[Post 1.1]
typ=vuz
podtrida=Osobní vozy poštovní
popis=Poštovní vůz s 1 odd. (1980)
hmotnost=39
delka=24,5
typ_nakladu=pošta
naklad=16
max_rych=140
elektro=0
vyrobce=
autor=puvodni
img=156
[WLB821]
typ=vuz
podtrida=Osobní vozy lehátkový
popis=lůžkový vůz 2. třídy
hmotnost=50
delka=24,5
osob=30
max_rych=140
elektro=0
vyrobce=VEB Wagonbau Bautzen 1974
autor=puvodni
img=157
[WLAB_CD]
typ=vuz
podtrida=Osobní vozy lůžkové
popis=Lůžkový vůz 1. a 2. třídy
hmotnost=44
delka=24,5
osob=30
max_rych=160
elektro=0
vyrobce=<NAME> (1984-1985)
autor=puvodni
img=159
imgex=3
[Sa_CD]
typ=vuz
podtrida=Osobní vozy kombinované
popis=Osobní vůz salónní
hmotnost=38
delka=24,5
osob=12
max_rych=160
elektro=0
vyrobce=<NAME> (1968,1971)
autor=puvodni
img=160
[BRm]
typ=vuz
podtrida=Osobní vozy restaurační
popis=oddílový vůz 2. třídy s bufetovým oddílem
hmotnost=43
delka=26,4
osob=54
max_rych=140
elektro=0
vyrobce=VEB Wagonbau Bautzen 1984, reko ŽOS České Velenice 1994
autor=puvodni
img=167
imgex=1
[Bymee_CSD]
typ=vuz
podtrida=Osobní 2. tř.
popis=Osobní vůz 2 třídy (1989)
hmotnost=46
delka=26,4
osob=96
max_rych=160
elektro=0
vyrobce=
autor=puvodni
img=169
imgex=1
[Bmpz]
typ=vuz
podtrida=Osobní 2. tř.
popis=Osobní vůz 2 třídy s klimatizací
hmotnost=49
delka=26,4
osob=80
max_rych=200
elektro=0
vyrobce=
autor=puvodni
img=170
imgex=1
[Bmee]
typ=vuz
podtrida=Osobní 2. tř.
popis=oddílový vůz 2. třídy (1985)
hmotnost=38
delka=26,4
osob=66
max_rych=160
elektro=0
vyrobce=rekonstrukce 1994-1995
autor=puvodni
img=172
imgex=2
[Beer]
typ=vuz
podtrida=Osobní 2. tř.
popis=osobní vůz 2. třídy s oddíly atypických velikostí
hmotnost=41
delka=24,5
osob=50
max_rych=140
elektro=0
vyrobce=modernizace MOVO 1992-1994
autor=puvodni
img=173
imgex=1
[Btee_CD]
typ=vuz
podtrida=Osobní 2. tř.
popis=Velkoprostorový vůz 2. třídy, ex Bt278
hmotnost=39
delka=24,5
osob=88
max_rych=120
elektro=0
vyrobce=Vagónka Studénka (1969-1975), reko ŽOS České Velenice (2005-2007)
autor=puvodni
img=175
imgex=1
[Bee]
typ=vuz
podtrida=Osobní 2. tř.
popis=oddílový / kombinovaný vůz 2. třídy
hmotnost=39
delka=24,5
osob=58
max_rych=140
elektro=0
vyrobce=rekonstrukce České Velenice 1992, 1996-1997
autor=puvodni
img=177
imgex=1
[799_CD]
typ=hnaci
podtrida=Kombinované lok.elektro/diesel
popis=Lokomotiva řady 799 je dvouzdrojové hnací vozidlo;Lokomotivní řada 799 se používá výhradně pro posun lokomotiv a vozů v depech kolejových vozidel (DKV).
hmotnost=24
delka=7,24
vykon=37
sila=62
max_rych_hnaci=10
max_rych=10
elektro=0
vyrobce=ČMKS Holding 1992-2000;Tyto lokomotivy byly rekonstruovány v letech 1992 až 2000
autor=
img=1223
imgex=2
[Bai]
typ=vuz
podtrida=Osobní 2. tř.
popis=Osobní vůz 2 třídy (1960)
hmotnost=39
delka=24,5
osob=80
max_rych=120
elektro=0
vyrobce=
autor=puvodni
img=187
[831_KZC]
typ=hnaci
podtrida=Motorové vozy
popis='Loďák' - osobní motorový vůz, historický nátěr, ex M 262.1
hmotnost=47
delka=21,2
vykon=309
sila=55
max_rych_hnaci=90
osob=56
typ_nakladu=Kola
naklad=3
max_rych=90
elektro=0
vyrobce=Vagonka Tatra Studénka (1952-1959)
autor=
img=1095
imgex=1
[Ampz]
typ=vuz
podtrida=Osobní 1. tř.
popis=Osobní vůz 1 třídy s klimatizací
hmotnost=47
delka=26,4
osob=58
max_rych=200
elektro=0
vyrobce=
autor=puvodni
img=191
imgex=3
[Amee]
typ=vuz
podtrida=Osobní 1. tř.
popis=Osobní vůz 1 třídy (1985)
hmotnost=40
delka=26,4
osob=66
max_rych=160
elektro=0
vyrobce=
autor=puvodni
img=192
[Aee]
typ=vuz
podtrida=Osobní 1. tř.
popis=Oddílový vůz 1 třídy, rekonstrukce, interval 19-70
hmotnost=46
delka=24,5
osob=54
max_rych=160
elektro=0
vyrobce=
autor=puvodni
img=194
imgex=2
[Bftn791]
typ=hnaci
podtrida=Řídící vozy
popis=Přípojný řídící osobní vůz
hmotnost=38
delka=19,7
vykon=0
sila=0
max_rych_hnaci=120
osob=64
typ_nakladu=Kola
naklad=2
max_rych=120
elektro=0
vyrobce=Moravskoslezská Vagonka
autor=puvodni
img=200
imgex=5
[912_CD]
typ=hnaci
podtrida=Řídící vozy
popis='Kassandra' - přípojný řídící osobní vůz
hmotnost=17
delka=14,22
vykon=0
sila=0
max_rych_hnaci=80
osob=41
max_rych=80
elektro=0
vyrobce=reko <NAME>, 2002
autor=puvodni
img=201
imgex=1
[011 ZSR]
typ=vuz
podtrida=Přípojné vozy
popis=Přípojný osobní vůz
hmotnost=16
delka=13,97
osob=53
max_rych=80
elektro=0
vyrobce=Vagonka Tatra Studénka
autor=puvodni
img=209
[860]
typ=hnaci
podtrida=Motorové vozy
popis='Chrochtadlo' - osobní motorový vůz
hmotnost=56
delka=24,5
vykon=480
sila=103
max_rych_hnaci=100
osob=70
max_rych=100
elektro=0
vyrobce=Vagonka Tatra Studénka, 1974
autor=puvodni
img=213
[854]
typ=hnaci
podtrida=Motorové vozy
popis='Katr' - motorový osobní vůz
hmotnost=50
delka=24,79
vykon=588
sila=80
max_rych_hnaci=120
osob=48
typ_nakladu=Kola
naklad=5
max_rych=120
elektro=0
vyrobce=reko PARS Nova Šumperk, 1997-2006
autor=puvodni
img=214
imgex=3
[853]
typ=hnaci
podtrida=Motorové vozy
popis='Hydra' - motorový osobní vůz
hmotnost=50
delka=24,79
vykon=588
sila=78
max_rych_hnaci=120
osob=48
max_rych=120
elektro=0
vyrobce=Vagonka Tatra Studénka, 1969-1970
autor=puvodni
img=215
imgex=1
[852]
typ=hnaci
podtrida=Motorové vozy
popis='Hydra' - motorový osobní vůz
hmotnost=50
delka=24,79
vykon=588
sila=78
max_rych_hnaci=120
osob=48
max_rych=120
elektro=0
vyrobce=Vagonka Tatra Studénka, 1968-69
autor=puvodni
img=216
[851]
typ=hnaci
podtrida=Motorové vozy
popis='Krokodýl' - osobní motorový vůz
hmotnost=50
delka=24,79
vykon=515
sila=77
max_rych_hnaci=110
osob=48
typ_nakladu=Kola
naklad=5
max_rych=110
elektro=0
vyrobce=Vagonka Tatra Studénka, 1967-1968
autor=puvodni
img=217
imgex=4
[850]
typ=hnaci
podtrida=Motorové vozy
popis='Krokodýl' - osobní motorový vůz
hmotnost=50
delka=24,79
vykon=515
sila=77
max_rych_hnaci=110
osob=48
typ_nakladu=Kola
naklad=5
max_rych=110
elektro=0
vyrobce=Vagonka Tatra Studénka, 1962-1967
autor=puvodni
img=218
imgex=1
[843]
typ=hnaci
podtrida=Motorové vozy
popis='Rakev' - osobní motorový vůz
hmotnost=56
delka=25,2
vykon=600
sila=100
max_rych_hnaci=110
osob=64
typ_nakladu=Kola
naklad=5
max_rych=110
elektro=0
vyrobce=Moravskoslezská Vagonka Studénka, 1995-1997
autor=puvodni
img=219
imgex=2
[842]
typ=hnaci
podtrida=Motorové vozy
popis='Kvatro' - osobní motorový vůz
hmotnost=45
delka=25,2
vykon=424
sila=60
max_rych_hnaci=100
osob=80
typ_nakladu=Kola
naklad=8
max_rych=100
elektro=0
vyrobce=Moravskoslezská Vagonka Studénka, 1989-1994
autor=puvodni
img=220
imgex=1
[Br_KDS]
typ=vuz
podtrida=Osobní vozy kombinované
popis=Dvě třetiny vozu tvoří velkoprostorový oddíl s barem. Tento prostor slouží k různým společenským akcím a účelům, jako například svatby, oslavy, diskotéky, konference apod. Pro tyto účely je vůz vybaven profesionální audio a videotechnikou.
hmotnost=42
delka=24,5
osob=44
max_rych=120
elektro=0
vyrobce=výrobce Vagónka Studénka
autor=
img=1533
[830.1s]
typ=hnaci
podtrida=Motorové vozy
popis='Kredenc' - motorový osobní vůz
hmotnost=43
delka=21,196
vykon=301
sila=45
max_rych_hnaci=90
osob=56
typ_nakladu=Kola
naklad=3
max_rych=90
elektro=0
vyrobce=Královopolská strojírna, 1949-1950
autor=puvodni
img=223
[820]
typ=hnaci
podtrida=Motorové vozy
popis='Singrovka' - motorový osobní vůz
hmotnost=32
delka=18,5
vykon=206
sila=40
max_rych_hnaci=70
osob=56
max_rych=70
elektro=0
vyrobce=Vagonka Tatra Studénka, 1963-1964
autor=puvodni
img=224
[812_CD]
typ=hnaci
podtrida=Motorové vozy
popis='Esmeralda' - motorový osobní vůz
hmotnost=22
delka=14,47
vykon=242
sila=54
max_rych_hnaci=80
osob=38
typ_nakladu=Kola
naklad=3
max_rych=80
elektro=0
vyrobce=reko PARS Nova Šumperk, 2001
autor=puvodni
img=225
[811]
typ=hnaci
podtrida=Motorové vozy
popis=Motorový osobní vůz
hmotnost=20
delka=13,97
vykon=155
sila=40
max_rych_hnaci=80
osob=55
typ_nakladu=Kola
naklad=3
max_rych=80
elektro=0
vyrobce=Vagonka Tatra Studénka (1973-1984), reko PARS Nova Šumperk (1997)
autor=puvodni
img=226
imgex=2
[810]
typ=hnaci
podtrida=Motorové vozy
popis='Šukafon' - motorový osobní vůz
hmotnost=20
delka=13,97
vykon=155
sila=29
max_rych_hnaci=80
osob=55
typ_nakladu=Kola
naklad=3
max_rych=80
elektro=0
vyrobce=Vagonka Tatra Studénka, 1973-1984
autor=puvodni
img=227
imgex=1
[801]
typ=hnaci
podtrida=Motorové vozy
popis=Motorový osobní vůz
hmotnost=17
delka=12,1
vykon=114
sila=28
max_rych_hnaci=60
max_rych=60
elektro=0
vyrobce=Tatra Kopřivnice (1948)
autor=puvodni
img=228
[781]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselelektrická lokomotiva
hmotnost=116
delka=17,55
vykon=1470
sila=285
max_rych_hnaci=100
max_rych=100
elektro=0
vyrobce=Lokomotivka Vorošilovgrad
autor=puvodni
img=229
imgex=4
[775]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselelektrická lokomotiva
hmotnost=111
delka=18
vykon=1470
sila=244
max_rych_hnaci=100
max_rych=100
elektro=0
vyrobce=ČKD Praha (1961)
autor=puvodni
img=230
imgex=1
[770]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselelektrická lokomotiva
hmotnost=115
delka=17,24
vykon=993
sila=280
max_rych_hnaci=90
max_rych=90
elektro=0
vyrobce=SMZ Dubnica n.Váh. (1966)
autor=puvodni
img=231
imgex=5
[759]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Lokomotiva 759 (do roku 1987 řada T 499.0) je dieselová lokomotiva s elektrickým přenosem výkonu.Pro svůj tvar čela s jedním velkým kulatým reflektorem pod oknem stanoviště strojvedoucího si vysloužila přezdívku „kyklop“.
hmotnost=85
delka=17,84
vykon=1766
sila=270
max_rych_hnaci=140
max_rych=140
elektro=0
vyrobce=ČKD Praha (1974)
autor=puvodni
img=232
[754]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselelektrická lokomotiva
hmotnost=74
delka=16,5
vykon=1472
sila=180
max_rych_hnaci=100
max_rych=100
elektro=0
vyrobce=ČKD Praha (1975)
autor=puvodni
img=233
imgex=11
[753]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselelektrická lokomotiva
hmotnost=77
delka=16,5
vykon=1325
sila=215
max_rych_hnaci=100
max_rych=100
elektro=0
vyrobce=ČKD Praha (1970)
autor=puvodni
img=234
imgex=6
[752]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselelektrická lokomotiva
hmotnost=74
delka=16,5
vykon=1102
sila=215
max_rych_hnaci=100
max_rych=100
elektro=0
vyrobce=ČKD Praha (1969)
autor=puvodni
img=235
[751]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselelektrická lokomotiva
hmotnost=75
delka=16,5
vykon=1103
sila=215
max_rych_hnaci=100
max_rych=100
elektro=0
vyrobce=ČKD Praha (1966)
autor=puvodni
img=236
imgex=1
[743]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselelektrická lokomotiva
hmotnost=66
delka=13,6
vykon=800
sila=123
max_rych_hnaci=90
max_rych=90
elektro=0
vyrobce=ČKD Praha (1987)
autor=puvodni
img=237
imgex=1
[742]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselelektrická lokomotiva
hmotnost=64
delka=13,6
vykon=883
sila=192
max_rych_hnaci=90
max_rych=90
elektro=0
vyrobce=ČKD Praha (1977)
autor=puvodni
img=238
imgex=9
[735]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselelektrická lokomotiva
hmotnost=64
delka=14,18
vykon=927
sila=192
max_rych_hnaci=90
max_rych=90
elektro=0
vyrobce=TS Martin (1973)
autor=puvodni
img=239
imgex=2
[731]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselelektrická lokomotiva
hmotnost=68
delka=15,28
vykon=600
sila=205
max_rych_hnaci=80
max_rych=80
elektro=0
vyrobce=ČKD Praha (1990)
autor=puvodni
img=240
imgex=2
[721]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselelektrická lokomotiva
hmotnost=74
delka=13,26
vykon=552
sila=186
max_rych_hnaci=80
max_rych=80
elektro=0
vyrobce=ČKD Praha (1962)
autor=puvodni
img=242
imgex=1
[714]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselelektrická lokomotiva
hmotnost=64
delka=14,24
vykon=600
sila=104
max_rych_hnaci=90
max_rych=90
elektro=0
vyrobce=ČKD Praha (1961), reko
autor=puvodni
img=243
imgex=2
[710]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselhydromechanická
hmotnost=41
delka=9,44
vykon=301
sila=57
max_rych_hnaci=60
max_rych=60
elektro=0
vyrobce=ČKD Praha (1961)
autor=puvodni
img=244
[708]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselelektrická lokomotiva
hmotnost=37
delka=9,45
vykon=470
sila=110
max_rych_hnaci=80
max_rych=80
elektro=0
vyrobce=ČKD Praha (1997)
autor=puvodni
img=245
[704]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselelektrická lokomotiva
hmotnost=29
delka=7,5
vykon=220
sila=103
max_rych_hnaci=60
max_rych=60
elektro=0
vyrobce=ČKD Praha (1988)
autor=puvodni
img=246
imgex=2
[701]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselmechanická lokomotiva
hmotnost=22
delka=7,24
vykon=147
sila=58
max_rych_hnaci=40
max_rych=40
elektro=0
vyrobce=ČKD Praha (1957)
autor=puvodni
img=247
[700]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Dieselmechanická lokomotiva
hmotnost=22
delka=7,24
vykon=121
sila=53
max_rych_hnaci=40
max_rych=40
elektro=0
vyrobce=ČKD Praha (1955)
autor=puvodni
img=248
imgex=1
[471]
typ=hnaci
podtrida=El. jednotky stejnosměrné
popis='CityElefant/Ešus/Hliník' - stejnosměrná elektrická jednotka
hmotnost=156
delka=79,2
vykon=2000
sila=99
max_rych_hnaci=140
osob=310
typ_nakladu=Kola
naklad=12
max_rych=140
elektro=1
vyrobce=ČKD Vagonka, Škoda Transportation (zač. 1997)
autor=puvodni
img=252
imgex=2
[470 z]
typ=hnaci
podtrida=El. jednotky stejnosměrné
popis='Zelený Kraken' - elektrická stejnosměrná jednotka
hmotnost=266
delka=132
vykon=1952
sila=262
max_rych_hnaci=120
osob=590
max_rych=120
elektro=1
vyrobce=Moravskoslezská Vagonka Studénka, 1991
autor=puvodni
img=253
[470 s]
typ=hnaci
podtrida=El. jednotky stejnosměrné
popis='<NAME>' - elektrická stejnosměrná jednotka
hmotnost=266
delka=132
vykon=1952
sila=262
max_rych_hnaci=120
osob=590
max_rych=120
elektro=1
vyrobce=Moravskoslezská Vagonka Studénka, 1991
autor=puvodni
img=254
[460 z]
typ=hnaci
podtrida=El. jednotky stejnosměrné
popis='Tornádo' - stejnosměrná elektrická jednotka
hmotnost=239
delka=122,5
vykon=2000
sila=260
max_rych_hnaci=110
osob=336
max_rych=110
elektro=1
vyrobce=Vagonka Tatra Studénka, 1971-1978
autor=puvodni
img=255
[460 m]
typ=hnaci
podtrida=El. jednotky stejnosměrné
popis='Tornádo' - stejnosměrná elektrická jednotka
hmotnost=239
delka=122,5
vykon=2000
sila=260
max_rych_hnaci=110
osob=336
max_rych=110
elektro=1
vyrobce=Vagonka Tatra Studénka, 1971-1978
autor=puvodni
img=256
imgex=4
[451.2]
typ=hnaci
podtrida=El. jednotky stejnosměrné
popis='CityFrog/Žabotlam' - stejnosměrná elektrická jednotka
hmotnost=182
delka=95,8
vykon=1320
sila=78
max_rych_hnaci=100
osob=300
max_rych=100
elektro=1
vyrobce=Vagonka Tatra Studénka, 1964-1968
autor=puvodni
img=259
imgex=3
[451.1]
typ=hnaci
podtrida=El. jednotky stejnosměrné
popis='CityFrog/Žabotlam' - stejnosměrná elektrická jednotka
hmotnost=148
delka=72,5
vykon=1320
sila=78
max_rych_hnaci=100
osob=211
max_rych=100
elektro=1
vyrobce=Vagonka Tatra Studénka, 1964-1968
autor=puvodni
img=260
imgex=1
[372]
typ=hnaci
podtrida=El. vícesystémové
popis='Bastard' - elektrická lokomotiva dvousystémová
hmotnost=84
delka=16,8
vykon=3080
sila=243
max_rych_hnaci=120
max_rych=120
elektro=17
vyrobce=Škoda Plzeň, 1988-1991
autor=puvodni
img=263
imgex=1
[371]
typ=hnaci
podtrida=El. vícesystémové
popis='Bastard' - elektrická lokomotiva dvousystémová
hmotnost=83
delka=16,8
vykon=3080
sila=205
max_rych_hnaci=160
max_rych=160
elektro=17
vyrobce=Škoda Plzeň, 1996-2001
autor=puvodni
img=264
imgex=3
[BDsee ZSSK]
typ=vuz
podtrida=Osobní 2. tř.
popis= osobní/služební
hmotnost=25
delka=24,5
osob=40
max_rych=140
elektro=0
vyrobce=VEB Waggonbau Bautzen
autor=
img=999
[643_RJ]
typ=hnaci
podtrida=Motorové jednotky
popis='Talent' - motorová jednotka
hmotnost=92
delka=49,3
vykon=630
sila=100
max_rych_hnaci=120
osob=148
max_rych=120
elektro=0
vyrobce=Bombardier Transportation (1996-)
autor=
img=1092
[Ampz_RJ]
typ=vuz
podtrida=Osobní 2. tř.
popis=Velkoprostorový vůz 2. třídy s klimatizací, zásuvky, reko 2002-2010
hmotnost=44
delka=26,4
osob=60
max_rych=200
elektro=0
vyrobce=Jenbacher (1977-1979)
autor=
img=1093
[Bmz_RJ]
typ=vuz
podtrida=Osobní 2. tř.
popis=Oddílový vůz 2. třídy, klimatizace, reko 2001-2009
hmotnost=51
delka=26,4
osob=66
max_rych=200
elektro=0
vyrobce=SGP Graz, SGP Simmering/Fiat Torino (1976-1980)
autor=
img=1094
imgex=2
[Ua S]
typ=vuz
podtrida=Nákladní vozy speciální
popis=Vuz na sypke hmoty speciální
hmotnost=27
delka=10
typ_nakladu=uhlí
naklad=52
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=311
[Fas]
typ=vuz
podtrida=Nákladní vozy speciální
popis=Vuz na kontejnerové vany
hmotnost=31
delka=16
typ_nakladu=kontejnerové vany
naklad=61
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=312
[Fbks]
typ=vuz
podtrida=Nákladní vozy speciální
popis=Vuz na roztavenou ocel
hmotnost=16
delka=13
typ_nakladu=roztavená ocel
naklad=24
max_rych=80
elektro=0
vyrobce=
autor=puvodni
img=313
[Uaain]
typ=vuz
podtrida=Nákladní vozy speciální
popis=Hlubinný vuz speciální stavby
hmotnost=70
delka=28
typ_nakladu=PLM zásilky
naklad=140
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=314
[Raj]
typ=vuz
podtrida=Nákladní vozy
popis=Vuz na sypke hmoty
hmotnost=12
delka=23
typ_nakladu=sypké hmoty
naklad=30
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=322
imgex=1
[011]
typ=vuz
podtrida=Přípojné vozy
popis=Pripojny vuz
hmotnost=12
delka=14
osob=85
max_rych=80
elektro=0
vyrobce=
autor=puvodni
img=323
imgex=2
[705]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Úzkorozchodná diesel (1954)
hmotnost=32
delka=13
vykon=260
sila=0
max_rych_hnaci=50
max_rych=50
elektro=0
vyrobce=
autor=puvodni
img=324
[720]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Motorová lokomotiva s elektrickým přenosem výkonu
hmotnost=61
delka=12,4
vykon=551
sila=160
max_rych_hnaci=60
max_rych=60
elektro=0
vyrobce=ČKD (1958-1961)
autor=puvodni
img=325
[749]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Lokomotiva ČKD (1966)
hmotnost=75
delka=16,5
vykon=1103
sila=150
max_rych_hnaci=100
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=326
imgex=19
[750]
typ=hnaci
podtrida=Dieselové lokomotivy
popis=Lokomotiva ČKD (1975)
hmotnost=74
delka=16,5
vykon=1325
sila=215
max_rych_hnaci=100
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=327
imgex=1
[A]
typ=vuz
podtrida=Osobní 1. tř.
popis=1.tr. kupé interval 19-41
hmotnost=41
delka=24,5
osob=54
max_rych=140
elektro=0
vyrobce=
autor=puvodni
img=328
imgex=1
[AB]
typ=vuz
podtrida=Osobní vozy kombinované
popis=1.+2.tr. kupe interval 39-41
hmotnost=42
delka=24,5
osob=60
max_rych=140
elektro=0
vyrobce=
autor=puvodni
img=330
imgex=1
[BRcm ZSSK]
typ=vuz
podtrida=Osobní vozy kombinované
popis=Lehátkový vůz s bufetovým oddílem
hmotnost=43
delka=26,4
osob=54
max_rych=140
elektro=0
vyrobce=Waggonbau Bautzen (1984)
autor=puvodni
img=331
imgex=1
[B]
typ=vuz
podtrida=Osobní 2. tř.
popis=2.tr. kupe
hmotnost=40
delka=24,5
osob=80
max_rych=140
elektro=0
vyrobce=
autor=puvodni
img=332
imgex=1
[BDmeer]
typ=vuz
podtrida=Osobní vozy se služ.oddílem
popis=2.tr.+služební oddíl
hmotnost=40
delka=26,4
osob=40
max_rych=160
elektro=0
vyrobce=
autor=puvodni
img=334
imgex=5
[BDbmsee]
typ=vuz
podtrida=Osobní vozy se služ.oddílem
popis=oddílový vůz 2. třídy se služebním oddílem, neRIC, zvedací plošina
hmotnost=37
delka=26,4
osob=46
max_rych=160
elektro=0
vyrobce=VEB Wagonbau Bautzen 1987
autor=puvodni
img=335
imgex=1
[BDs]
typ=vuz
podtrida=Osobní vozy se služ.oddílem
popis=2.tr.+služební oddíl
hmotnost=50
delka=24,5
osob=40
typ_nakladu=Kola
naklad=25
max_rych=140
elektro=0
vyrobce=
autor=puvodni
img=336
imgex=1
[Bee (Beel)]
typ=vuz
podtrida=Osobní 2. tř.
popis=oddílový vůz 2. třídy
hmotnost=43
delka=24,5
osob=60
max_rych=160
elektro=0
vyrobce=rekonstrukce MOVO 1997-1998
autor=puvodni
img=337
[Bdmtee]
typ=vuz
podtrida=Osobní 2. tř.
popis=2.tr. velkoprostor.
hmotnost=46
delka=26,4
osob=96
max_rych=140
elektro=0
vyrobce=VEB Wagonbau Bautzen 1989-1990
autor=puvodni
img=338
imgex=1
[Bpee]
typ=vuz
podtrida=Osobní 2. tř.
popis=2.tr. velkoprostor.
hmotnost=41
delka=24,5
osob=78
max_rych=160
elektro=0
vyrobce=rekonstrukce MOVO 1996-1997
autor=puvodni
img=339
imgex=1
[Bt]
typ=vuz
podtrida=Osobní 2. tř.
popis=2.tr. velkoprostor.
hmotnost=39
delka=24,5
osob=88
max_rych=120
elektro=0
vyrobce=Vagónka Studénka 1969-1975
autor=puvodni
img=341
imgex=1
[Bp]
typ=vuz
podtrida=Osobní 2. tř.
popis=osobní vůz 2. třídy, 2 oddíly, sedadla s látkovými potahy
hmotnost=34
delka=24,5
osob=86
typ_nakladu=Kola
naklad=4
max_rych=120
elektro=0
vyrobce=Vagónka Studénka 1991-1992
autor=puvodni
img=342
imgex=1
[Bmz]
typ=vuz
podtrida=Osobní 2. tř.
popis=2.tr. velkoprostor.
hmotnost=40
delka=26,4
osob=80
max_rych=200
elektro=0
vyrobce=
autor=puvodni
img=343
imgex=2
[Bmx]
typ=vuz
podtrida=Přípojné vozy
popis=velkoprostorový vůz 2. třídy (050)
hmotnost=32
delka=24,8
osob=80
max_rych=120
elektro=0
vyrobce=Vagónka Studénka 1962-1968
autor=puvodni
img=344
imgex=1
[Btx763]
typ=vuz
podtrida=Přípojné vozy
popis=2.tr. pripojny
hmotnost=25
delka=18,5
osob=64
max_rych=80
elektro=0
vyrobce=modernizace ŽOS Nymburk 1995-1996
autor=puvodni
img=347
imgex=1
[D]
typ=vuz
podtrida=Služební vozy
popis=pro Mn vlaky
hmotnost=20
delka=15
max_rych=80
elektro=0
vyrobce=
autor=puvodni
img=348
imgex=2
[Ds]
typ=vuz
podtrida=Služební vozy
popis=Služební vůz pro osobní vlaky
hmotnost=38
delka=24,5
max_rych=140
elektro=0
vyrobce=
autor=puvodni
img=349
imgex=5
[WLABmee61]
typ=vuz
podtrida=Osobní vozy lůžkové
popis=lůžkový vůz 1. a 2. třídy, klimatizovaný
hmotnost=44
delka=26,4
osob=30
max_rych=160
elektro=0
vyrobce=rekonstrukce MOVO 2000-2001
autor=puvodni
img=350
imgex=1
[WRRm]
typ=vuz
podtrida=Osobní vozy restaurační
popis=
hmotnost=20
delka=26,4
osob=30
max_rych=160
elektro=0
vyrobce=
autor=puvodni
img=351
imgex=3
[WRRmz]
typ=vuz
podtrida=Osobní vozy restaurační
popis=
hmotnost=57
delka=26,4
osob=44
max_rych=200
elektro=0
vyrobce=
autor=puvodni
img=352
[Habbilns]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Čtyřnápravový krytý vůz s posuvnými bočními stěnami
hmotnost=27
delka=23
typ_nakladu=kusové zboží
naklad=55
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=353
[Hadgs]
typ=vuz
podtrida=Nákladní vozy
popis=Vuz na sypke hmoty
hmotnost=12
delka=23
typ_nakladu=sypké hmoty
naklad=30
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=354
imgex=1
[Lp]
typ=vuz
podtrida=Nákladní vozy
popis=Vuz na sypke hmoty
hmotnost=12
delka=23
typ_nakladu=sypké hmoty
naklad=30
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=355
[Nas]
typ=vuz
podtrida=Nákladní vozy
popis=Vuz na nakl.auta
hmotnost=12
delka=23
typ_nakladu=sypké hmoty
naklad=30
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=356
imgex=2
[Ra]
typ=vuz
podtrida=Nákladní vozy
popis=Cisterna
hmotnost=12
delka=23
typ_nakladu=kapaliny
naklad=30
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=358
imgex=2
[Vte]
typ=vuz
podtrida=Nákladní vozy
popis=Vuz na drevo
hmotnost=12
delka=23
typ_nakladu=dřevo
naklad=30
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=360
[Wap]
typ=vuz
podtrida=Nákladní vozy
popis=Vuz na sypke hmoty
hmotnost=12
delka=23
typ_nakladu=sypké hmoty
naklad=30
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=361
imgex=5
[Z]
typ=vuz
podtrida=Nákladní vozy
popis=Krytý vůz
hmotnost=12
delka=23
typ_nakladu=kryté zboží
naklad=30
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=362
[Zaz]
typ=vuz
podtrida=Nákladní vozy
popis=Krytý vůz
hmotnost=12
delka=23
typ_nakladu=kryté zboží
naklad=30
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=363
imgex=1
[Zts]
typ=vuz
podtrida=Nákladní vozy
popis=Krytý vůz
hmotnost=12
delka=23
typ_nakladu=kryté zboží
naklad=30
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=364
[Ztsc]
typ=vuz
podtrida=Nákladní vozy
popis=Krytý vůz
hmotnost=12
delka=23
typ_nakladu=kryté zboží
naklad=30
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=365
[Ztsc-Post]
typ=vuz
podtrida=Nákladní vozy uzavřené
popis=Dvounápravový krytý vůz běžné stavby
hmotnost=15
delka=14
typ_nakladu=kryté zboží
naklad=24
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=366
imgex=1
[Bvcmbz_OBB]
typ=vuz
podtrida=Osobní vozy lehátkové
popis=Vozy odkoupeny od DB v prosinci 2016.
hmotnost=50
delka=26,4
osob=50
max_rych=200
elektro=0
vyrobce=AW Halberstadt, 2001-2002
autor=
img=1514
imgex=2
[WLABmz7171_OBB]
typ=vuz
podtrida=Osobní vozy lůžkové
popis=
hmotnost=56
delka=26,4
osob=33
max_rych=160
elektro=0
vyrobce=modernizace 1999-2000
autor=
img=1517
imgex=1
[WLABmz7290_OBB]
typ=vuz
podtrida=Osobní vozy lůžkové
popis=Vozy odkoupeny od DB v prosinci 2016.
hmotnost=60
delka=26,4
osob=36
max_rych=200
elektro=0
vyrobce=Siemens SGP Verkehrstechnik, 2003 - 2005
autor=
img=1518
imgex=3
[WLABmz7571_OBB]
typ=vuz
podtrida=Osobní vozy lůžkové
popis=Lůžkoví vůz 1. a 2. tř.
hmotnost=58
delka=26,4
osob=32
max_rych=160
elektro=0
vyrobce=
autor=
img=1519
imgex=1
[WRmz8890_OBB]
typ=vuz
podtrida=Osobní vozy restaurační
popis=Podvozky MD 522, vozy 200 ... 207 nejsou vybaveny tlakotěsnými přechodovými návalky. V letech 2005 - 2010 proběhla modernizace interiéru (Upgrading-Programm).
hmotnost=54
delka=26,9
osob=40
max_rych=200
elektro=0
vyrobce=SGP Simmering, 1988-1989
autor=
img=1520
imgex=1
[560]
typ=hnaci
podtrida=El. jednotky střídavé
popis=El. střídavá jednotka
hmotnost=295
delka=122
vykon=840
sila=178
max_rych_hnaci=120
osob=340
max_rych=120
elektro=2
vyrobce=Vagonka Tatra Studénka
autor=puvodni
img=374
imgex=4
[Cem]
typ=vuz
podtrida=Nákladní vozy
popis=Nedefinovano
hmotnost=12
delka=23
typ_nakladu=ostatni
naklad=30
max_rych=120
elektro=0
vyrobce=
autor=puvodni
img=377
[Eas]
typ=vuz
podtrida=Nákladní vozy
popis=Nákladní otevřený vůz, prázdný (kvůli Staničáři snížena rychlost na 100 km/h)
hmotnost=12
delka=14
typ_nakladu=ostatni
naklad=30
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=378
imgex=1
[Falls]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=výsypný čtyřnápravový vůz
hmotnost=26
delka=13,5
typ_nakladu=uhlí
naklad=54
max_rych=120
elektro=0
vyrobce=
autor=puvodni
img=379
imgex=4
[Hacgs]
typ=vuz
podtrida=Nákladní vozy
popis=Nedefinovano
hmotnost=12
delka=23
typ_nakladu=ostatni
naklad=30
max_rych=120
elektro=0
vyrobce=
autor=puvodni
img=380
[Ibops]
typ=vuz
podtrida=Nákladní vozy
popis=Nedefinovano
hmotnost=12
delka=23
typ_nakladu=ostatni
naklad=30
max_rych=120
elektro=0
vyrobce=
autor=puvodni
img=382
[Iqrs]
typ=vuz
podtrida=Nákladní vozy
popis=Nedefinovano
hmotnost=12
delka=23
typ_nakladu=ostatni
naklad=30
max_rych=120
elektro=0
vyrobce=
autor=puvodni
img=383
[Kbkks]
typ=vuz
podtrida=Nákladní vozy
popis=Nedefinovano
hmotnost=13
delka=16,2
typ_nakladu=ostatni
naklad=23
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=384
[Smm]
typ=vuz
podtrida=Nákladní vozy
popis=Nedefinovano
hmotnost=12
delka=23
typ_nakladu=ostatni
naklad=30
max_rych=120
elektro=0
vyrobce=
autor=puvodni
img=385
[Smml]
typ=vuz
podtrida=Nákladní vozy
popis=Nedefinovano
hmotnost=12
delka=23
typ_nakladu=ostatni
naklad=30
max_rych=120
elektro=0
vyrobce=
autor=puvodni
img=386
[Zae]
typ=vuz
podtrida=Nákladní vozy cisternové
popis=Nedefinovano
hmotnost=22
delka=12,3
typ_nakladu=tekuté hmoty
naklad=43
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=387
[Zaes]
typ=vuz
podtrida=Nákladní vozy cisternové
popis=Nedefinovano
hmotnost=23
delka=12,8
typ_nakladu=tekuté hmoty
naklad=57
max_rych=120
elektro=0
vyrobce=
autor=puvodni
img=388
imgex=1
[Zkks]
typ=vuz
podtrida=Nákladní vozy
popis=Nedefinovano
hmotnost=12
delka=23
typ_nakladu=ostatni
naklad=30
max_rych=120
elektro=0
vyrobce=
autor=puvodni
img=389
imgex=1
[650_VBG]
typ=hnaci
podtrida=Motorové jednotky
popis=Regio-Shuttle RS1
hmotnost=43
delka=25,5
vykon=530
sila=0
max_rych_hnaci=120
osob=71
max_rych=120
elektro=0
vyrobce=Stadler Pankow GmbH. (2012)
autor=
img=1098
imgex=1
[ASmz_RJ]
typ=vuz
podtrida=Osobní 2. tř.
popis=Kombinovaný vůz 2. třídy, klimatizace, 'Internet Café', reko 2005
hmotnost=48
delka=26,4
osob=39
max_rych=200
elektro=0
vyrobce=SGP Simmering (1982)
autor=
img=1099
[801_KZC]
typ=hnaci
podtrida=Motorové vozy
popis='Hurvínek' - historický motorový osobní vůz, ex M131.1
hmotnost=17
delka=12,1
vykon=114
sila=28
max_rych_hnaci=60
osob=48
max_rych=60
elektro=0
vyrobce=Tatra Kopřivnice (1951)
autor=
img=1096
[642_VBG]
typ=hnaci
podtrida=Motorové jednotky
popis=Motorová jednotka Vogtlandbahn
hmotnost=90
delka=45
vykon=550
sila=90
max_rych_hnaci=120
osob=110
max_rych=120
elektro=0
vyrobce=Siemens/Düwag 2000
autor=
img=1097
[610_DB]
typ=hnaci
podtrida=Motorové jednotky
popis=Motorová jednotka DB Regio
hmotnost=100
delka=50
vykon=970
sila=100
max_rych_hnaci=160
osob=136
max_rych=160
elektro=0
vyrobce=MAN, DUEWAG (1991-1992)
autor=puvodni
img=411
[612_DB]
typ=hnaci
podtrida=Motorové jednotky
popis=Motorová jednotka DB Regio
hmotnost=116
delka=51,75
vykon=1120
sila=100
max_rych_hnaci=160
osob=146
max_rych=160
elektro=0
vyrobce=Adtranz, Bombardier Transportation (1998)
autor=puvodni
img=412
[642_DB]
typ=hnaci
podtrida=Motorové jednotky
popis='Desiro' - motorová jednotka DB Regio a VLB
hmotnost=69
delka=41,7
vykon=550
sila=90
max_rych_hnaci=120
osob=110
max_rych=120
elektro=0
vyrobce=Siemens Mobility (1999)
autor=puvodni
img=413
imgex=1
[4024_OBB]
typ=hnaci
podtrida=El. jednotky střídavé
popis='Talent' - dvousystémová elektrická jednotka ÖBB
hmotnost=116
delka=66,87
vykon=1520
sila=100
max_rych_hnaci=140
osob=199
max_rych=140
elektro=16
vyrobce=Bombaridier, 1996
autor=puvodni
img=418
[5047_OBB]
typ=hnaci
podtrida=Motorové vozy
popis=Rakouský motorový vůz určený na regionální tratě
hmotnost=40
delka=20
vykon=419
sila=68
max_rych_hnaci=120
osob=62
max_rych=120
elektro=0
vyrobce=1987
autor=puvodni
img=419
imgex=1
[Admnu61_PKP]
typ=vuz
podtrida=Osobní 1. tř.
popis=Oddílový vůz 1. třídy, klimatizace, zásuvky
hmotnost=48
delka=26,4
osob=54
max_rych=160
elektro=0
vyrobce=H. Cegielski - FPS Poznań (1995-1997), reko (2006-2007)
autor=puvodni
img=420
imgex=4
[Bdmnu61_PKP]
typ=vuz
podtrida=Osobní 2. tř.
popis=Oddílový vůz 2. třídy, klimatizace
hmotnost=48
delka=26,4
osob=66
max_rych=160
elektro=0
vyrobce=PESA Bydgoszcz
autor=puvodni
img=421
[Bdhmnu61_PKP]
typ=vuz
podtrida=Osobní 2. tř.
popis=Velkoprostorový vůz 2. třídy, klimatizace
hmotnost=48
delka=26,4
osob=80
max_rych=200
elektro=0
vyrobce=H. Cegielski - FPS Poznań (1999-2003)
autor=puvodni
img=422
imgex=3
[WRdm_PKP]
typ=vuz
podtrida=Osobní vozy restaurační
popis=Restaurační vůz
hmotnost=45
delka=26,4
osob=40
max_rych=160
elektro=0
vyrobce=Pafawag Wroclaw (1969-1990)
autor=puvodni
img=423
[Bdmnu_PKP]
typ=vuz
podtrida=Osobní 2. tř.
popis=Oddílový vůz 2. třídy, některé klimatizace
hmotnost=42
delka=26,4
osob=66
max_rych=160
elektro=0
vyrobce=<NAME>, <NAME>, <NAME> - FPS Poznań (1989-1995)
autor=puvodni
img=424
imgex=3
[Bdnu_PKP]
typ=vuz
podtrida=Osobní 2. tř.
popis=Velkoprostorový vůz 2. třídy
hmotnost=42
delka=24,5
osob=80
max_rych=140
elektro=0
vyrobce=Pafawag Wroclaw (1969-1990)
autor=puvodni
img=425
imgex=1
[Bc_PKP]
typ=vuz
podtrida=Osobní vozy lehátkové
popis=Lehátkový vůz
hmotnost=45
delka=24,5
osob=72
max_rych=160
elektro=0
vyrobce=<NAME> (1970-1977)
autor=puvodni
img=426
imgex=2
[WLAB_PKP]
typ=vuz
podtrida=Osobní vozy lůžkové
popis=Lůžkový vůz 1. a 2. třídy
hmotnost=44
delka=24,5
osob=30
max_rych=160
elektro=0
vyrobce=Pafawag Wroclaw (1969-1990)
autor=puvodni
img=427
imgex=1
[EP09_PKP]
typ=hnaci
podtrida=El. stejnosměrné
popis=elektrická stejnosměrná lokomotiva
hmotnost=84
delka=16,74
vykon=2940
sila=210
max_rych_hnaci=160
max_rych=160
elektro=1
vyrobce=Pafawag Wroclaw (1986-1997)
autor=puvodni
img=428
imgex=19
[Aeer61_ZSSK]
typ=vuz
podtrida=Osobní 1. tř.
popis=Oddílový vůz 1. třídy, klimatizace, zásuvky
hmotnost=45
delka=24,5
osob=54
max_rych=160
elektro=0
vyrobce=reko ŽOS Vrútky (2003-2005)
autor=puvodni
img=429
imgex=5
[Apeer61_ZSSK]
typ=vuz
podtrida=Osobní 1. tř.
popis=Velkoprostorový vůz 1. třídy, klimatizace, zásuvky
hmotnost=44
delka=24,5
osob=52
max_rych=160
elektro=0
vyrobce=reko ŽOS Vrútky (1996-2002)
autor=puvodni
img=430
imgex=3
[ARpeer61_ZSSK]
typ=vuz
podtrida=Osobní vozy kombinované
popis=Velkoprostorový vůz 1. třídy s barovým oddílem, klimatizace, zásuvky
hmotnost=45
delka=24,5
osob=39
max_rych=160
elektro=0
vyrobce=reko ŽOS Vrútky (2001)
autor=puvodni
img=431
imgex=4
[Bpeer61_ZSSK]
typ=vuz
podtrida=Osobní 2. tř.
popis=Velkoprostorový vůz 2. třídy, klimatizace
hmotnost=43
delka=24,5
osob=66
max_rych=160
elektro=0
vyrobce=reko ŽOS Vrútky (1996-2003)
autor=puvodni
img=432
[Beer61_ZSSK]
typ=vuz
podtrida=Osobní 2. tř.
popis=oddílový vůz 2. třídy, klimatizace
hmotnost=45
delka=24,5
osob=60
max_rych=160
elektro=0
vyrobce=rekonstrukce ŽOS Vrútky
autor=puvodni
img=433
imgex=1
[BDsheer61_ZSSK]
typ=vuz
podtrida=Osobní vozy se služ.oddílem
popis=Oddílový vůz 2. třídy se služebním oddílem, klimatizace, ex Bh
hmotnost=44
delka=24,5
osob=28
max_rych=160
elektro=0
vyrobce=reko ŽOS Vrútky (1996-2001)
autor=puvodni
img=434
imgex=3
[A_ZSSK]
typ=vuz
podtrida=Osobní 1. tř.
popis=Oddílový vůz 1. třídy
hmotnost=40
delka=24,5
osob=54
max_rych=140
elektro=0
vyrobce=Waggonbau Bautzen (1974-1985)
autor=puvodni
img=435
imgex=3
[B_ZSSK]
typ=vuz
podtrida=Osobní 2. tř.
popis=Oddílový vůz 2. třídy
hmotnost=39
delka=24,5
osob=80
max_rych=140
elektro=0
vyrobce=Waggonbau Bautzen (1974-1985)
autor=puvodni
img=436
imgex=2
[BDshmee_ZSSK]
typ=vuz
podtrida=Osobní vozy se služ.oddílem
popis=Oddílový vůz 2. třídy se služebním oddílem
hmotnost=39
delka=26,4
osob=36
max_rych=160
elektro=0
vyrobce=Waggonbau Bautzen (1989-1990)
autor=puvodni
img=437
imgex=3
[Bdt_ZSSK]
typ=vuz
podtrida=Osobní 2. tř.
popis=Velkoprostorový vůz 2. třídy
hmotnost=34
delka=24,5
osob=86
max_rych=120
elektro=0
vyrobce=Vagónka Studénka (1990-1991)
autor=puvodni
img=438
imgex=4
[Bdt]
typ=vuz
podtrida=Osobní 2. tř.
popis=osobní vůz 2. třídy, 2 oddíly
hmotnost=34
delka=24,5
osob=88
max_rych=120
elektro=0
vyrobce=Vagónka Studénka 1990-1992
autor=puvodni
img=439
imgex=1
[Bc_ZSSK]
typ=vuz
podtrida=Osobní vozy lehátkové
popis=Lehátkový vůz
hmotnost=42
delka=24,5
osob=54
max_rych=140
elektro=0
vyrobce=Waggonbau Bautzen (1975-1984)
autor=puvodni
img=440
imgex=3
[WLAB_ZSSK]
typ=vuz
podtrida=Osobní vozy lůžkové
popis=Lůžkový vůz 1. a 2. třídy
hmotnost=46
delka=24,5
osob=30
max_rych=140
elektro=0
vyrobce=Waggonbau Görlitz (1978-1985)
autor=puvodni
img=441
imgex=4
[WLABmee62_ZSSK]
typ=vuz
podtrida=Osobní vozy lůžkové
popis=Lůžkový vůz 1. a 2. třídy, klimatizace
hmotnost=56
delka=26,4
osob=33
max_rych=200
elektro=0
vyrobce=Waggonbau Görlitz (1994-1995)
autor=puvodni
img=442
imgex=2
[Ampz73]
typ=vuz
podtrida=Osobní 1. tř.
popis=velkoprostorový vůz 1. třídy, klimatizace
hmotnost=48
delka=26,4
osob=30
max_rych=200
elektro=0
vyrobce=SGP-Siemens - MSV Studénka 1998-1999
autor=puvodni
img=444
imgex=1
[Apee]
typ=vuz
podtrida=Osobní 1. tř.
popis=velkoprostorový vůz 1. třídy
hmotnost=41
delka=24,5
osob=60
max_rych=160
elektro=0
vyrobce=rekonstrukce MOVO 1996-1997
autor=puvodni
img=445
imgex=1
[Amz73_OBB]
typ=vuz
podtrida=Osobní 1. tř.
popis=Oddílový vůz 1. třídy, klimatizace, zásuvky
hmotnost=48
delka=26,4
osob=40
max_rych=200
elektro=0
vyrobce=SGP Simmering (1989-1993), reko (2006-2007)
autor=puvodni
img=446
imgex=1
[Amz61_OBB]
typ=vuz
podtrida=Osobní 1. tř.
popis=Oddílový vůz 1. třídy, klimatizace
hmotnost=48
delka=26,4
osob=54
max_rych=200
elektro=0
vyrobce=
autor=puvodni
img=447
imgex=1
[Amz61_MAV]
typ=vuz
podtrida=Osobní 1. tř.
popis=oddílový vůz 1. třídy, klimatizace
hmotnost=48
delka=26,4
osob=54
max_rych=200
elektro=0
vyrobce=
autor=puvodni
img=448
imgex=1
[Apmz61_H-START]
typ=vuz
podtrida=Osobní 1. tř.
popis=velkoprostorový vůz 1. třídy, klimatizace
hmotnost=46
delka=26,4
osob=60
max_rych=200
elektro=0
vyrobce=
autor=puvodni
img=449
imgex=5
[Apmz61_DB]
typ=vuz
podtrida=Osobní 1. tř.
popis=Velkoprostorový vůz 1. třídy, klimatizace
hmotnost=44
delka=26,4
osob=48
max_rych=200
elektro=0
vyrobce=
autor=puvodni
img=450
imgex=1
[Avmz61_DB]
typ=vuz
podtrida=Osobní 1. tř.
popis=Oddílový vůz 1. třídy, klimatizace
hmotnost=45
delka=26,4
osob=54
max_rych=200
elektro=0
vyrobce=
autor=puvodni
img=451
[B244]
typ=vuz
podtrida=Osobní 2. tř.
popis=oddílový vůz 2. třídy, 3 oddíly kuřácké
hmotnost=39
delka=24,5
osob=80
max_rych=140
elektro=0
vyrobce=
autor=puvodni
img=452
[Bdbmrsee]
typ=vuz
podtrida=Osobní vozy se služ.oddílem
popis=oddílový vůz 2. třídy se služebním oddílem, RIC, zvedací plošina
hmotnost=38
delka=26,4
osob=33
max_rych=160
elektro=0
vyrobce=VEB Wagonbau Bautzen 1987-1988
autor=puvodni
img=453
[Bimz_DB]
typ=vuz
podtrida=Osobní 2. tř.
popis=Vůz 2. třídy kombinovaný oddíly/velkoprostorový
hmotnost=47
delka=26,4
osob=60
max_rych=200
elektro=0
vyrobce=Raw Halberstadt (1984-1990)
autor=puvodni
img=454
imgex=1
[Bpmz61_DB]
typ=vuz
podtrida=Osobní 2. tř.
popis=velkoprostorový vůz 2. třídy, klimatizace
hmotnost=45
delka=26,4
osob=80
max_rych=200
elektro=0
vyrobce=
autor=puvodni
img=455
[Bpmbz61_DB]
typ=vuz
podtrida=Osobní 2. tř.
popis=Velkoprostorový vůz 2. třídy, klimatizace, úprava pro vozíčkáře
hmotnost=45
delka=26,4
osob=75
max_rych=200
elektro=0
vyrobce=
autor=puvodni
img=456
[Bpmdz73_DB]
typ=vuz
podtrida=Osobní 2. tř.
popis=velkoprostorový vůz 2. třídy, klimatizace, prostor pro jízní kola
hmotnost=45
delka=26,4
osob=64
max_rych=200
elektro=0
vyrobce=
autor=puvodni
img=457
[Bvmsz73_DB]
typ=vuz
podtrida=Osobní 2. tř.
popis=Vůz 2. třídy kombinovaný oddíly/velkoprostorový, klimatizace, zásuvky
hmotnost=45
delka=26,4
osob=64
max_rych=200
elektro=0
vyrobce=
autor=puvodni
img=458
[Bmz61_MAV]
typ=vuz
podtrida=Osobní 2. tř.
popis=oddílový vůz 2. třídy, klimatizace
hmotnost=51
delka=26,4
osob=66
max_rych=200
elektro=0
vyrobce=
autor=puvodni
img=459
[Wrmz61_MAV]
typ=vuz
podtrida=Osobní vozy restaurační
popis=restaurační vůz, klimatizace
hmotnost=50
delka=26,4
osob=36
max_rych=200
elektro=0
vyrobce=
autor=puvodni
img=460
imgex=2
[WRmz61_DB]
typ=vuz
podtrida=Osobní vozy restaurační
popis=Restaurační vůz, klimatizace
hmotnost=50
delka=27,5
osob=34
max_rych=200
elektro=0
vyrobce=
autor=puvodni
img=461
imgex=2
[Wrmz73]
typ=vuz
podtrida=Osobní vozy restaurační
popis=restaurační vůz, klimatizace
hmotnost=54
delka=26,4
osob=34
max_rych=200
elektro=0
vyrobce=SGP-Siemens - MSV Studénka 1997
autor=puvodni
img=462
imgex=3
[WRmee61]
typ=vuz
podtrida=Osobní vozy restaurační
popis=restaurační vůz s bufetovým oddílem, klimatizace
hmotnost=44
delka=26,4
osob=38
max_rych=160
elektro=0
vyrobce=VEB Wagonbau Bautzen 1978, rekonstrukce 1994
autor=puvodni
img=463
[WRm]
typ=vuz
podtrida=Osobní vozy restaurační
popis=restaurační vůz s bufetovým oddílem
hmotnost=44
delka=26,4
osob=38
max_rych=140
elektro=0
vyrobce=VEB Wagonbau Bautzen 1978, modernizace 1991
autor=puvodni
img=464
[WR]
typ=vuz
podtrida=Osobní vozy restaurační
popis=restaurační vůz
hmotnost=25
delka=24,5
osob=48
max_rych=140
elektro=0
vyrobce=VEB Wagonbau Bautzen 1976, rekonstrukce 1989
autor=puvodni
img=465
imgex=4
[Bmpz73_OBB]
typ=vuz
podtrida=Osobní 2. tř.
popis=Velkoprostorový vůz 2. třídy, klimatizace, zásuvky
hmotnost=45
delka=26,4
osob=83
max_rych=200
elektro=0
vyrobce=<NAME>, SGP Graz, Simmering (1990-1993), reko (2002-2009)
autor=puvodni
img=466
imgex=1
[Bmz61_OBB]
typ=vuz
podtrida=Osobní 2. tř.
popis=Oddílový vůz 2. třídy, klimatizace, zásuvky
hmotnost=51
delka=26,4
osob=66
max_rych=200
elektro=0
vyrobce=SGP Graz, SGP Simmering/Fiat Torino (1976-1982), reko (2001-2010)
autor=puvodni
img=467
imgex=2
[Bmto]
typ=vuz
podtrida=Osobní 2. tř.
popis=patrový osobní vůz 2. třídy
hmotnost=44
delka=26,8
osob=126
max_rych=100
elektro=0
vyrobce=VEB Wagonba<NAME>, 1976
autor=puvodni
img=468
[Btax]
typ=vuz
podtrida=Osobní 2. tř.
popis=přípojný vůz 2. třídy
hmotnost=15
delka=14
osob=62
typ_nakladu=Kola
naklad=6
max_rych=80
elektro=0
vyrobce=Vagónka Stdénka 1973-1983
autor=puvodni
img=469
imgex=2
[Btn]
typ=vuz
podtrida=Osobní 2. tř.
popis=velkoprostorový přípojný vůz 2. třídy
hmotnost=34
delka=24,5
osob=88
max_rych=120
elektro=0
vyrobce=Vagónka Studénka 1969
autor=puvodni
img=470
imgex=1
[Bc842]
typ=vuz
podtrida=Osobní vozy lehátkový
popis=lehátkový vůz
hmotnost=41
delka=24,5
osob=72
max_rych=160
elektro=0
vyrobce=VEB Wagonbau Bautzen 1975-1980
autor=puvodni
img=472
[Bcmz73_OBB]
typ=vuz
podtrida=Osobní vozy lehátkové
popis=Lehátkový vůz, klimatizace
hmotnost=44
delka=26,4
osob=54
max_rych=200
elektro=0
vyrobce=ADtranz (Bombardier) Dunakeszi - Siemens SGP Graz (2000-2001), <NAME> (1991)
autor=puvodni
img=473
imgex=7
[Bcmz61_OBB]
typ=vuz
podtrida=Osobní vozy lehátkové
popis=Lehátkový vůz, klimatizace
hmotnost=42
delka=26,4
osob=54
max_rych=200
elektro=0
vyrobce=<NAME> (1982), reko 1994
autor=puvodni
img=474
imgex=5
[Bdt279]
typ=vuz
podtrida=Osobní 2. tř.
popis=velkoprostorový vůz 2. třídy , 2 oddíly, pouze el. topení
hmotnost=34
delka=24,5
osob=88
typ_nakladu=Kola
naklad=4
max_rych=120
elektro=0
vyrobce=Vagónka Studénka 1986-1987
autor=puvodni
img=475
imgex=1
[BDtax]
typ=vuz
podtrida=Osobní 2. tř.
popis=přípojný vůz 2. třídy s prostorem pro rozšířenou přepravu spoluzavazadel
hmotnost=15
delka=14
osob=52
typ_nakladu=Kola
naklad=12
max_rych=80
elektro=0
vyrobce=rekonstrukce KOS Krnov 1998 - 2001
autor=puvodni
img=476
imgex=1
[B243]
typ=vuz
podtrida=Osobní 2. tř.
popis=oddílový vůz 2. třídy, doplněny látkové potahy + CZE
hmotnost=38
delka=24,5
osob=60
max_rych=140
elektro=0
vyrobce=VEB Wagonbau Bautzen 1982_1985, úprava KOS Krnov 2003
autor=puvodni
img=477
[Bt283]
typ=vuz
podtrida=Osobní 2. tř.
popis=velkoprostorový vůz 2. třídy, služební prostor pro vlak. čety (místo umývárny)
hmotnost=39
delka=24,5
osob=88
max_rych=120
elektro=0
vyrobce=Vagónka Studénka 1969-1975, úprava ŽOS České Velenice 2004-2005
autor=puvodni
img=478
imgex=1
[WLAB_BC]
typ=vuz
podtrida=Osobní vozy lůžkové
popis=Lůžkový vůz
hmotnost=57
delka=24,6
osob=30
max_rych=160
elektro=0
vyrobce=
autor=puvodni
img=479
[WLAB_RZD]
typ=vuz
podtrida=Osobní vozy lůžkové
popis=Lůžkový vůz
hmotnost=57
delka=24,6
osob=30
max_rych=160
elektro=0
vyrobce=
autor=puvodni
img=480
imgex=1
[WLABee824]
typ=vuz
podtrida=Osobní vozy lůžkové
popis=lůžkový vůz 1. a 2. třídy, prolisy na bočnici
hmotnost=50
delka=24,5
osob=30
max_rych=140
elektro=0
vyrobce=VEB <NAME> 1981, modernizace Pars nova Šumperk 2005
autor=puvodni
img=481
imgex=2
[WLABm62_RZD]
typ=vuz
podtrida=Osobní vozy lůžkové
popis=Lůžkový vůz 1. a 2. třídy
hmotnost=56
delka=26,4
osob=33
max_rych=160
elektro=0
vyrobce=
autor=puvodni
img=482
[WLABm62_UZ]
typ=vuz
podtrida=Osobní vozy lůžkové
popis=Lůžkový vůz 1. a 2. třídy
hmotnost=57
delka=24,6
osob=30
max_rych=160
elektro=0
vyrobce=
autor=puvodni
img=483
imgex=4
[WLABmee62_RZD]
typ=vuz
podtrida=Osobní vozy lůžkové
popis=Lůžkový vůz 1. a 2. třídy, klimatizace
hmotnost=56
delka=26,4
osob=33
max_rych=160
elektro=0
vyrobce=
autor=puvodni
img=484
imgex=3
[WLB_RZD]
typ=vuz
podtrida=Osobní vozy lůžkové
popis=Lůžkový vůz
hmotnost=57
delka=24,6
osob=30
max_rych=160
elektro=0
vyrobce=
autor=puvodni
img=485
[WLB_ZSSK]
typ=vuz
podtrida=Osobní vozy lůžkové
popis=Lůžkový vůz
hmotnost=44
delka=24,5
osob=30
max_rych=140
elektro=0
vyrobce=<NAME> (1978-1985)
autor=puvodni
img=486
[Lekqss]
typ=vuz
podtrida=Nákladní vozy speciální
popis=dvoupatrový vůz pro přepravu automobilů
hmotnost=24
delka=27
typ_nakladu=automobily
naklad=19
max_rych=140
elektro=0
vyrobce=
autor=puvodni
img=487
[Laekks]
typ=vuz
podtrida=Nákladní vozy speciální
popis=dvoupatrový vůz pro přepravu automobilů
hmotnost=17
delka=17,1
typ_nakladu=automobily
naklad=19
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=488
[Sgnss]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=kontejnerový vůz
hmotnost=20
delka=19,7
typ_nakladu=kontejnery
naklad=60
max_rych=120
elektro=0
vyrobce=
autor=puvodni
img=489
imgex=1
[Sgnss Ko]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=kontejnerový vůz - ložený
hmotnost=20
delka=19,7
typ_nakladu=kontejnery
naklad=60
max_rych=120
elektro=0
vyrobce=
autor=puvodni
img=490
imgex=6
[Sggmrss]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=dvoudílný kontejnerový vůz s kloubem
hmotnost=30
delka=29,6
typ_nakladu=kontejnery
naklad=80
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=491
imgex=2
[Sggmrss Ko]
typ=vuz
podtrida=Nákladní vozy plošinové
popis=dvoudílný kontejnerový vůz s kloubem - v loženém stavu
hmotnost=30
delka=29,6
typ_nakladu=kontejnery
naklad=80
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=492
imgex=3
[Falls U]
typ=vuz
podtrida=Nákladní vozy otevřené
popis=výsypný čtyřnápravový vůz - v loženém stavu
hmotnost=26
delka=13,5
typ_nakladu=uhlí
naklad=54
max_rych=100
elektro=0
vyrobce=
autor=puvodni
img=493
imgex=1
[ET22_PKP]
typ=hnaci
podtrida=El. stejnosměrné
popis=elektrická stejnosměrná lokomotiva
hmotnost=120
delka=19,24
vykon=3000
sila=412
max_rych_hnaci=125
max_rych=125
elektro=1
vyrobce=<NAME> (1969-1989)
autor=puvodni
img=494
imgex=4
<file_sep>/php/vini/getvini.php
<?
include "./root.php";
ini_set('default_charset', 'windows-1250');
header("Content-type: text/plain; charset=windows-1250");
include "{$root}obecne.php";
include "{$root}sql.php";
mysqli_set_charset ( $sql_id,'cp1250');
$dat=mysqli_fetch_assoc(mysqli_query($sql_id,'SELECT * FROM verze;'));
$verze=$dat['cisloverze'];
headers();
header('Content-type: text/plain; charset=windows-1250');
echo ";vozy.ini PHP generator\n";
echo ";(c) bbf 2005\n";
echo ";Datum vytvoreni: ".Date("d. M Y H:i:s")."\n\n\n\n\n";
//default hlavicka
echo <<<EOT
[default]
_verze=$verze
EOT;
//generuji seznam
$res=mysqli_query($sql_id,"SELECT * FROM vozy;");
while ($dat=mysqli_fetch_array($res)) {
echo "[{$dat['nazev']}]\n";
echo "typ=".($dat['hnaci']?'hnaci':'vuz')."\n";
echo "podtrida={$dat['trida']}\n";
echo "popis={$dat['popis']}\n";
echo "hmotnost={$dat['hmotnost']}\n";
echo "delka=".strtr($dat['delka'],".",',')."\n";
if ($dat['hnaci']) {
echo "vykon={$dat['vykon']}\n";
echo "sila={$dat['sila']}\n";
echo "max_rych_hnaci={$dat['max_rych_hnaci']}\n";
};
if ($dat['osob']) {
echo "osob={$dat['osob']}\n";
}
if ($dat['typ_nakladu']) {
echo "typ_nakladu={$dat['typ_nakladu']}\n";
echo "naklad={$dat['naklad']}\n";
}
echo "max_rych={$dat['max_rych']}\n";
echo "elektro={$dat['elektro']}\n";
echo "vyrobce={$dat['vyrobce']}\n";
echo "autor={$dat['autor']}\n";
if (File_exists("img/v_{$dat['id']}.png")) {
echo "img={$dat['id']}\n";
} else {
echo "img=-1\n";
};
$oi=1;
while (File_exists("img/v_{$dat['id']}_$oi.png")) {
$oi++;
}
if ($oi-->1) {
echo "imgex=$oi\n";
}
echo "\n";
};
?>
<file_sep>/php/vini/getpng.php
<?
$file=$_GET['file'];
if (File_Exists('img/'.$file)) {
header('Content-type: image/png');
$fp=fopen('img/'.$file,'rb');
echo fread($fp,filesize('img/'.$file));
fclose($fp);
} else {
header('Content-type: image/png');
include('img/vuz_default.png');
}
?>
<file_sep>/php/index.php
<?
include './root.php';
include "{$root}obecne.php";
layout_header("Staničář - podpora vývoje","Staničář");
//moznosti prace podle promenne AKCE:
//podle prilogovaneho uzivatele:
//pokud _SESSION["rights"]>0
//pridava se moznost pridani, modifikace a mazani vozu
//!$akce -> tabulka vozu, odkaz na aktualni vozy.ini
//$akce=change $id (-1=>pridani)
//$akce=save
//$akce=del
$_SESSION['userid']='puvodni';
?>
<p>
Tyto WWW stránky jsou určeny pro vývojáře a příznivce simulátoru Staničář.
</p>
<div style="background-color:#77DD77;color:white;font-weight:bold;padding:20px;">
<p>Vážení přátelé,</p>
<p>vzhledem k přechodu serveru na PHP7 jsme trochu rozbili online definice vozů, dokud aplikaci někdo neupraví. Většina funkcí zdá se jede, ale chyby vznikají - vím třeba o nefunčním náhledu při editaci obrázků. Další chyby prosím hlaste do fóra na k-reportu.
Já na opravy nyní upřímně nemám čas, jsem rád, že jsem našel pár hodin na opětovné zprovoznění aspoň nějak. Dobrovolníci? napište na embedix at centrum.cz </p>
<p>autor Stagu -bbf- 29.10.2018</p>
<p>P.S. S opravami by se také mohlo dořešit přihlašování. </p>
</div>
<p><b>Pozor:</b> pro správnou funkci načítání DB vozů je nutné v souboru stag.ini Stagu změnit sekci update takto:</p>
<pre>
[update]
base=http://stag.strasil.net/vini/
</pre>
<?
layout_end();
?>
<file_sep>/php/vini/_old_img/repair.php
<?php
error_reporting(E_ALL ^ E_NOTICE);
//echo "<h3>Mimo provoz - bylo zrizeno kvuli problemum na pipni.cz, tady to doufam nebude potreba</h3>";
//die();
$file = 'data/';
// set up basic connection
$conn_id = ftp_connect('stanicar.ic.cz');
// login with username and password
//$login_result = ftp_login($conn_id, 'stanicar', 'stag');
echo "<h3>Pripojen...</h3>";
if ($handle = opendir('./')) {
echo "<h4>Adresar otevren</h4>";
$sum=0;
while (false !== ($file = readdir($handle))) {
echo "Mam soubor - ";
if ($file != "." && $file != "..") {
echo "jsou to data -";
$fname=$file;
if (file_exists("./".$fname)) {
echo "existuji - ";
/*if (ftp_chmod($conn_id, 0777, './data/'.$fname) !== false) {
echo "$fname OK<br>\n";
} else */{
echo "could not chmod $fname - testuji PHP funkci:\n";
if (chmod ('./'.$fname,0777)) {
echo "OK - OPRAVENO";
} else {
echo "Chyba - NELZE OPRAVIT";
};
echo "<br>\n";
}
};
}
};
closedir($handle);
} else {
echo "<h3>Chyba: adresář se soubory není dostupný!</h3>";
};
echo "<h3>Ukonceno.</h3>";
// close the connection
ftp_close($conn_id);
?>
<file_sep>/php/vini/backup.php
<?php
header('Content-type: text/plain');
require_once "./../sql.php";
require_once "./../obecne.php";
function message_die($typ,$hlaska,$hl2,$line,$file,$sql)
{
echo ("CHYBA $typ $hlaska $hl2 na $line v $file\n$sql");
die();
}
function get_table_def_mysql($table, $crlf)
{
global $drop, $db;
$schema_create = "";
$field_query = "SHOW FIELDS FROM $table";
$key_query = "SHOW KEYS FROM $table";
$schema_create .= "CREATE TABLE $table($crlf";
//
// Ok lets grab the fields...
//
$result = mysql_query($field_query);
if(!$result)
{
message_die(GENERAL_ERROR, "Failed in get_table_def (show fields)", "", __LINE__, __FILE__, $field_query);
}
while ($row = mysql_fetch_array($result))
{
$schema_create .= ' ' . $row['Field'] . ' ' . $row['Type'];
if(!empty($row['Default']))
{
$schema_create .= ' DEFAULT \'' . $row['Default'] . '\'';
}
if($row['Null'] != "YES")
{
$schema_create .= ' NOT NULL';
}
if($row['Extra'] != "")
{
$schema_create .= ' ' . $row['Extra'];
}
$schema_create .= ",$crlf";
}
//
// Drop the last ',$crlf' off ;)
//
$schema_create = ereg_replace(',' . $crlf . '$', "", $schema_create);
//
// Get any Indexed fields from the database...
//
$result = mysql_query($key_query);
if(!$result)
{
message_die(GENERAL_ERROR, "FAILED IN get_table_def (show keys)", "", __LINE__, __FILE__, $key_query);
}
while($row = mysql_fetch_array($result))
{
$kname = $row['Key_name'];
// print_r($row);
if(($kname != 'PRIMARY') && ($row['Non_unique'] == 0))
{
$kname = "UNIQUE|$kname";
}
if(!is_array($index[$kname]))
{
$index[$kname] = array();
}
$index[$kname][] = $row['Column_name'];
}
while(list($x, $columns) = @each($index))
{
$schema_create .= ", $crlf";
if($x == 'PRIMARY')
{
$schema_create .= ' PRIMARY KEY (' . implode($columns, ', ') . ')';
}
elseif (substr($x,0,6) == 'UNIQUE')
{
$schema_create .= ' UNIQUE ' . substr($x,7) . ' (' . implode($columns, ', ') . ')';
}
else
{
$schema_create .= " FULLTEXT (" . implode($columns, ', ') . ')';
}
}
$schema_create .= "$crlf);";
if(get_magic_quotes_runtime())
{
return(stripslashes($schema_create));
}
else
{
return($schema_create);
}
} // End get_table_def_mysql
//
// This function is for getting the data from a mysql table.
//
function get_table_content_mysql($table, $handler)
{
global $db;
// Grab the data from the table.
if (!($result = mysql_query("SELECT * FROM $table")))
{
message_die(GENERAL_ERROR, "Failed in get_table_content (select *)", "", __LINE__, __FILE__, "SELECT * FROM $table");
}
// Loop through the resulting rows and build the sql statement.
if ($row = mysql_fetch_array($result))
{
$handler("\n#\n# Table Data for $table\n#\n");
$field_names = array();
// Grab the list of field names.
$num_fields = mysql_num_fields($result);
$table_list = '(';
for ($j = 0; $j < $num_fields; $j++)
{
$field_names[$j] = mysql_field_name($result,$j);
$table_list .= (($j > 0) ? ', ' : '') . $field_names[$j];
}
$table_list .= ')';
do
{
// Start building the SQL statement.
$schema_insert = "INSERT INTO $table $table_list VALUES(";
// Loop through the rows and fill in data for each column
for ($j = 0; $j < $num_fields; $j++)
{
$schema_insert .= ($j > 0) ? ', ' : '';
if(!isset($row[$field_names[$j]]))
{
//
// If there is no data for the column set it to null.
// There was a problem here with an extra space causing the
// sql file not to reimport if the last column was null in
// any table. Should be fixed now :) JLH
//
$schema_insert .= 'NULL';
}
elseif ($row[$field_names[$j]] != '')
{
$schema_insert .= '\'' . addslashes($row[$field_names[$j]]) . '\'';
}
else
{
$schema_insert .= '\'\'';
}
}
$schema_insert .= ');';
// Go ahead and send the insert statement to the handler function.
$handler(trim($schema_insert));
}
while ($row = mysql_fetch_array($result));
}
return(true);
}
function output_table_content($content)
{
global $tempfile;
//fwrite($tempfile, $content . "\n");
//$backup_sql .= $content . "\n";
echo $content ."\n";
return;
}
echo "#\n";
echo "# gvid.wz.cz Backup Script\n";
echo "# Vypis tabulek z databaze >$sql_db<\n";
echo "#\n# Datum : " . gmdate("d-m-Y H:i:s", time()) . " GMT\n";
echo "#\n";
$tables=mysql_query("SHOW TABLES FROM $sql_db;");
echo "# Pocet tabulek: ".mysql_num_rows($tables)."\n";
echo "#\n";
echo "#\n";
while (list($table_name)=mysql_fetch_array($tables))
{
//$table_name = $tables[$i];
echo "#\n# TABLE: " . $table_prefix . $table_name . "\n#\n";
echo get_table_def_mysql($table_prefix . $table_name, "\n") . "\n";
get_table_content_mysql($table_prefix . $table_name, "output_table_content");
}
?>
<file_sep>/php/sql.php
<?
//KNIHOVNA PRO PRACI S DATABAZI
//verze pro MySQL
//<NAME> (c) 2004
//nastaveni
$sql_server='127.0.0.1';
$sql_user='';
$sql_pwd='';
$sql_db='strasilnet02';
//Seznam funkci
// sql_connect()
// sql_disconnect()
// logevent($text)
$sql_ready=0;
function sql_connect() {
global $sql_db,$sql_user,$sql_server,$sql_pwd;
global $sql_id;
$sql_id=mysqli_connect($sql_server,$sql_user,$sql_pwd);
if (!$sql_id)
{
$sql_id = mysqli_connect("localhost",$sql_db,$sql_pwd);
}
if ($sql_id&&mysqli_select_db($sql_id,$sql_db)) {
$sql_ready=1;
mysqli_set_charset ( $sql_id,'cp1250');
return 1;
} else {
echo '<html><meta content="text/html; charset=windows-1250" http-equiv="Content-Type"><body>';
echo "<h3><font color=\"red\">CHYBA PŘI PŘIPOJOVÁNÍ K SQL SERVERU - DATABÁZE NENÍ DOSTUPNÁ</font></h3>";
die("</body></html>");
return 0;
}
}
function sql_disconnect() {
mysqli_close($sql_id);
$sql_ready=0;
}
function logevent($text) {
global $REMOTE_ADDR;
$text=addslashes($text);
$host=@gethostbyaddr($REMOTE_ADDR);
$host.=" (".$REMOTE_ADDR.")";
$sql="INSERT INTO log SET text=\"$text\", user=\"$host\", time=\"".date ("l j. n. Y H:i:s")."\";";
mysqli_query($sql);
}
sql_connect();
?>
| 9ae55690ee60f9c4c3673a2ac6bb65831dc94898 | [
"Markdown",
"PHP",
"INI"
] | 13 | INI | embedix/stag-web | 38fbcf24ce4ba47b8acb9fc40f44b1d0d6a42908 | 495ef9e644928ee43d10177c38ee975cd8a77a0d |
refs/heads/master | <file_sep>var skills = '<div class="col-md-3"><img src="assets/images/skills/%data%.png" alt="my skills" class="img-responsive img-circle"></div>';
for(var i = 1; i <= 12; i++)
$('.skills-section-images').append(skills.replace('%data%', i));
var experience = '<li><h4>%data% ( %data1% ) </h4><br><h5>%data2%</h5></li><hr>';
var experience_array = [
{
name: 'First Company',
years: '2012-2015',
info: 'Lorem Ipsum is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industrys standard dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen book. It has survived not only five centuries, but also the leap into electronic typesetting, remaining essentially unchanged. It was popularised in the 1960s with the release of Letraset sheets containing Lorem Ipsum passages, and more recently with desktop publishing software like Aldus PageMaker including versions of Lorem Ipsum.'
},
{
name: 'Second Company',
years: '2015-2016',
info: 'Lorem Ipsum is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industrys standard dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen book. It has survived not only five centuries, but also the leap into electronic typesetting, remaining essentially unchanged. It was popularised in the 1960s with the release of Letraset sheets containing Lorem Ipsum passages, and more recently with desktop publishing software like Aldus PageMaker including versions of Lorem Ipsum.'
},
{
name: 'Third Company',
years: '2016-2017',
info: 'Lorem Ipsum is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industrys standard dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen book. It has survived not only five centuries, but also the leap into electronic typesetting, remaining essentially unchanged. It was popularised in the 1960s with the release of Letraset sheets containing Lorem Ipsum passages, and more recently with desktop publishing software like Aldus PageMaker including versions of Lorem Ipsum.'
}
];
var experience_new = '';
for(var i = 0; i < experience_array.length; i++)
{
experience_new = experience.replace('%data%', experience_array[i]['name']);
experience_new = experience_new.replace('%data1%', experience_array[i]['years']);
experience_new = experience_new.replace('%data2', experience_array[i]['info']);
$('.experience-section-info').append(experience_new);
}
var projects = '<li><h3>%data%</h3><div class="row"><div class="col-md-3"><img src="assets/images/%data1%.jpg" alt="project-images" class="img-responsive"></div><div class="col-md-9"><h3>Language(s): %data2%</h3><h5>%data3%</h5></div></div></li><br><hr>';
var projects_array = [
{
name: 'Selenium Project',
img: 'rockpaper',
language: 'Java',
info: 'Lorem Ipsum is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industrys standard dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen book. It has survived not only five centuries, but also the leap into electronic typesetting, remaining essentially unchanged.'
},
{
name: 'Adventure Game',
img: 'rpg',
language: 'Java',
info: 'Lorem Ipsum is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industrys standard dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen book. It has survived not only five centuries, but also the leap into electronic typesetting, remaining essentially unchanged.'
},
{
name: '<NAME>',
img: 'ucfinfo',
language: 'HTML5, CSS3, JavaScript',
info: 'Lorem Ipsum is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industrys standard dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen book. It has survived not only five centuries, but also the leap into electronic typesetting, remaining essentially unchanged.'
}
];
var projects_new = '';
for(var i = 0; i < projects_array.length; i++)
{
projects_new = projects.replace('%data%', projects_array[i]['name']);
projects_new = projects_new.replace('%data1%', projects_array[i]['img']);
projects_new = projects_new.replace('%data2%', projects_array[i]['language']);
projects_new = projects_new.replace('%data3%', projects_array[i]['info']);
$('.projects-section-info').append(projects_new);
}
<file_sep># Basic-Portfolio-JS-jQuery
| 2d71f9f60e512221bf184f1a4d7b98c0df8d494c | [
"JavaScript",
"Markdown"
] | 2 | JavaScript | hedbladucf/Basic-Portfolio-JS-jQuery | 0bbdd41d0ce69c085f103cffad2dbe922e09c3da | 3732d0fc6416f5f9320f1214701c84ef17b0c2e3 |
refs/heads/master | <file_sep>package com.slashg.pickerview;
import android.annotation.SuppressLint;
import android.content.Context;
import android.os.CountDownTimer;
import android.util.AttributeSet;
import android.view.GestureDetector;
import android.view.GestureDetector.OnGestureListener;
import android.view.MotionEvent;
import android.widget.HorizontalScrollView;
public class HorizontalScrollViewExt extends HorizontalScrollView {
public HorizontalScrollViewExt(Context context) {
super(context);
init(context);
}
public HorizontalScrollViewExt(Context context, AttributeSet attrs) {
super(context, attrs);
init(context);
}
public HorizontalScrollViewExt(Context context, AttributeSet attrs,
int defStyleAttr) {
super(context, attrs, defStyleAttr);
init(context);
}
private OnFlingCallback[] flingCallbacks;
private OnStoppedCallback[] stoppedCallbacks;
private VelocityCallback[] velocityUpdateCallbacks;
private GestureDetector gestureDetector;
private CountDownTimer speedMeasureTimer; // 100 ms timer that measure scroll distance per second (scroll velocity)
private float scrollVelocity = 0f;
int scrollX = 0, newScrollX = 0;
boolean isScrolled = false; //boolean flag to distinguish between scrolls, flings and taps
private void init(Context context)
{
flingCallbacks = new OnFlingCallback[0];
stoppedCallbacks = new OnStoppedCallback[0];
velocityUpdateCallbacks = new VelocityCallback[0];
speedMeasureTimer = new CountDownTimer(100, 100) {
@Override
public void onTick(long millisUntilFinished) {
}
@Override
public void onFinish() {
float seconds = 0.1f;
newScrollX = getScrollX();
scrollVelocity = (newScrollX - scrollX) / seconds;
onVelocityUpdate(scrollVelocity);
scrollX = newScrollX;
}
};
gestureDetector = new GestureDetector(context, new OnGestureListener() {
@Override
public boolean onSingleTapUp(MotionEvent e) {
System.out.println("HorizontalScrollViewExt::onSingleTapUp( )" );
return false;
}
@Override
public void onShowPress(MotionEvent e) {
System.out.println("HorizontalScrollViewExt::onShowPress( )" );
}
@Override
public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX,
float distanceY) {
isScrolled = true;
System.out.println("HorizontalScrollViewExt::onScroll( )" );
return true;
}
@Override
public void onLongPress(MotionEvent e) {
System.out.println("HorizontalScrollViewExt::onLongPress( )" );
}
@Override
public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX,
float velocityY) {
isScrolled = false; //set scroll flag to false to make 'fling' more dominant
System.out.println("HorizontalScrollViewExt::onFling( )" );
speedMeasureTimer.start();
triggerOnFlingCallbacks(velocityX);
return true;
}
@Override
public boolean onDown(MotionEvent e) {
System.out.println("HorizontalScrollViewExt::onDown( )" );
return false;
}
});
}
@SuppressLint("ClickableViewAccessibility") @Override
public boolean onTouchEvent(MotionEvent ev)
{
gestureDetector.onTouchEvent(ev);
if(ev.getAction() == MotionEvent.ACTION_UP && isScrolled)
{
triggerOnStoppedCallback();
isScrolled = false;
}
return super.onTouchEvent(ev);
}
private void onVelocityUpdate(float velocity)
{
System.out.println("HorizontalScrollViewExt::onVelocityUpdate( " + velocity + " )" );
scrollVelocity = velocity;
triggerVelocityCallback(velocity);
if(velocity == 0)
{
onStopScroll();
}
else
{
speedMeasureTimer.start();
}
}
private void onStopScroll()
{
System.out.println("HorizontalScrollViewExt::onStopScroll()" );
speedMeasureTimer.cancel();
triggerOnStoppedCallback();
}
public void addOnFlingCallback(OnFlingCallback callback)
{
OnFlingCallback[] temp = flingCallbacks;
flingCallbacks = new OnFlingCallback[temp.length + 1];
for (int i = 0; i<temp.length ; i++)
{
flingCallbacks[i] = temp[i];
}
flingCallbacks[temp.length] = callback;
}
public void addVelocityCallback(VelocityCallback callback)
{
VelocityCallback[] temp = velocityUpdateCallbacks;
velocityUpdateCallbacks = new VelocityCallback[temp.length + 1];
for (int i = 0; i<temp.length ; i++)
{
velocityUpdateCallbacks[i] = temp[i];
}
velocityUpdateCallbacks[temp.length] = callback;
}
public void addOnStoppedCallback(OnStoppedCallback callback)
{
OnStoppedCallback[] temp = stoppedCallbacks;
stoppedCallbacks = new OnStoppedCallback[temp.length + 1];
for (int i = 0; i<temp.length ; i++)
{
stoppedCallbacks[i] = temp[i];
}
stoppedCallbacks
[temp.length] = callback;
}
private void triggerOnFlingCallbacks(float velocity)
{
/**
* Triggers all registered callback actions for fling events
*/
for (OnFlingCallback flingCallback : flingCallbacks) {
if(flingCallback != null)
{
flingCallback.flung(velocity);
}
}
}
private void triggerVelocityCallback(float velocity)
{
/**
* Triggers all registered callback actions with scroll velocity updates
*/
for (VelocityCallback velocityCallback : velocityUpdateCallbacks) {
if(velocityCallback != null)
{
velocityCallback.velocityUpdate(velocity);
}
}
}
private void triggerOnStoppedCallback()
{
/**
* Triggers all registered callback actions for end-of-scroll events
*/
for (OnStoppedCallback onStoppedCallback : stoppedCallbacks) {
if(onStoppedCallback != null)
{
onStoppedCallback.onStopScroll();
}
}
}
}
// Callback interfaces for fling, velocity-update and end of scroll
interface OnFlingCallback { public void flung(float velocity); }
interface VelocityCallback { public void velocityUpdate(float velocity); }
interface OnStoppedCallback { public void onStopScroll(); } | 415d985f9b10ae52e1e67490b46948e7fc2bb7e8 | [
"Java"
] | 1 | Java | slashg0/numberpicker | 71ef2c836734f2984b55a214c4b65a63d8327c59 | 46d62af813ce5c62afd8a73c251b34eb1bab9efc |
refs/heads/master | <file_sep>#read data
data <- read.table("/Users/apple/Desktop/ID5059/Practicals/P01/auto-mpg.data", sep=" ", header=T)
attach(data)
#remove rows with NAs
cleandata <- na.omit(data)
# convert to comma separated values for use in Excel
write.csv(cleandata,"auto-data.csv")
#####1 Linear Models
# linreg: simple linear regression of y on x, with x rescaled to [0, 1].
#
# Arguments:
#   x      - numeric vector containing the explanatory variable
#   y      - numeric vector containing the dependent variable
#   output - 1: print goodness-of-fit measures and draw the fitted line,
#            0: run silently
#   ploto  - 1: open a new 1x2 plot (basis functions + scatter) before the
#            fitted line is drawn; 0: add the line to the current plot
#   opt    - 1: return c(adjusted R-squared, AIC); 0: return nothing
linreg <- function(x, y, output=1, ploto=1, opt=0)
{
  # Rescale x to [0, 1] so coefficients are on a common scale.
  x <- x - min(x)
  x <- x / max(x)
  # Design matrix DM: intercept column plus the scaled predictor.
  n <- length(x)
  q <- 2
  DM <- matrix(1, n, q)
  DM[, 2] <- x
  # Fit without an implicit intercept (the intercept is column 1 of DM).
  reg <- lm(y ~ 0 + DM)
  # Goodness-of-fit measures
  # Residual sum of squares (vectorized; equivalent to the sapply loop).
  rss <- sum(residuals(reg)^2)
  # Total sum of squares: t(y)%*%y - n*mean(y)^2 = sum((y - mean(y))^2).
  tss <- t(y) %*% y - mean(y)^2 * n
  # Coefficient of determination: R^2
  R2 <- 1 - rss / tss
  # Adjusted coefficient of determination, penalized for q parameters.
  R2adj <- 1 - ((n - 1) / (n - q)) * (1 - R2)
  # Akaike information criterion of the fitted model.
  aic <- AIC(reg)
  if (output == 1)
  {
    # Summary output
    cat("RSS: ", rss, "\n")
    # BUG FIX: the original printed t(y)%*%y - mean(y)^2/n here, which
    # disagrees with the TSS used in the R-squared computation above
    # (mean(y)^2 * n); print the value actually used.
    cat("TSS: ", tss, "\n")
    cat("R-squared: ", R2, "\n")
    cat("Adjusted R-squared: ", R2adj, "\n")
    cat("AIC: ", aic, "\n")
    # Graphic: evaluate the fitted line on an evenly spaced grid in [0, 1].
    xp <- 0:100 / 100
    np <- length(xp)
    DMp <- matrix(1, np, q)
    DMp[, 2] <- xp
    if (ploto == 1) par(mfrow = c(1, 2))
    if (ploto == 1) matplot(xp, DMp, type = "l", lwd = 2, main = "Individual functions")
    if (ploto == 1) plot(x, y, main = "Linear regression", pch = 20, col = "darkblue")
    # Drawn unconditionally so the line can be added to an existing plot
    # when ploto == 0.
    lines(xp, DMp %*% coef(reg), col = "orange", type = "l", lwd = 2)
  }
  if (opt == 1) { return(c(R2adj, aic)) }
}
#show the graphs and RSS of linear regression and calculate the MSE
##1 displacement
linreg(cleandata$displacement, cleandata$mpg)
mse_displacement1 <- 8378.822/length(cleandata$mpg)
##2 horsepower
linreg(cleandata$horsepower, cleandata$mpg)
mse_horsepower1 <- 9385.916/length(cleandata$mpg)
##3 weight
linreg(cleandata$weight, cleandata$mpg)
mse_weight1 <- 7321.234/length(cleandata$mpg)
##4 acceleration
linreg(cleandata$acceleration, cleandata$mpg)
mse_acceleration1 <- 19550.46/length(cleandata$mpg)
#mse of all four attributes
mse_linear <- c(mse_displacement1, mse_horsepower1, mse_weight1, mse_acceleration1)
mse_linear
#####2 Bin Smooths
#The function binsmoothREG performs a binsmooth regression with a user defined binlength
#Input Arguments:
# x - vector containing the explonatory variable
# y - vector containing the dependent variable
# binlength - amount of x values per bin
# ouptut - 1: delivers some output, 0: no output
# opt - 1: returns adj R-squared, 0: returns nothing
# ploto - 1: Create new plot, 0: no new plot
binsmoothREG <- function(x, y, binlength=20, knotsdef=NULL, output=1, ploto=1, opt=0)
{
#Scale data to [0,1]
x<-x-min(x)
x<-x/max(x)
#Sort x values in ascending order
y <- y[order(x)]
x <- sort(x)
n <- length(x)
#Devide data into bins
if(is.vector(knotsdef)) bins = knotsdef
else bins = ceiling(length(x) / binlength)
#Create Design Matrix without intercept
DM <- matrix(1, length(x), bins)
#Set all elements not corresponding to region j equal 0
for(i in 1:bins)
{
if(i==1) { xstart = 1 }
if(i>1) { xstart = (i-1)*binlength+1 }
xend = min(xstart + binlength-1, length(x))
binelements <- xstart:xend
elements <- 1:length(x)
elements[binelements] <- 0
DM[elements,i] <- 0
}
#Perform Linear Regreesion
reg <- lm(y~0+DM)
#Calculate goodness of fit measures
q <- bins
#Residual sum of squares
rss <- sum(sapply(residuals(reg), function(x) { x^2 }))
#Coefficient of determination: R^2
R2 <- 1 - (rss/ (t(y)%*%y-(mean(y)**2*n)))
#Adjusted Coefficient of determination: R^2
R2adj <- 1 - ( (n-1)/(n-q) ) * (1-R2)
#AIC
aic <- AIC(reg)
if(output==1)
{
#Summary output
cat("Elements per bin: ", binlength, "\n")
cat("Number of bins: ", bins, "\n")
cat("RSS: ", rss, "\n")
cat("TSS: ", t(y)%*%y-(mean(y)**2/n), "\n")
cat("R-squared: ", R2, "\n")
cat("Adjusted R-squared: ", R2adj, "\n")
cat("AIC: ", aic, "\n")
#cat("Coefficents: \n")
#print(coef(reg))
#print(summary(reg))
#print(anova(reg))
#Graphic
if(ploto==1) plot(x,y, main="Binsmooth regression", pch=20, col="darkblue")
j<-1
for(i in 1:length(coef(reg)))
{
if(i>1) lines(c(x[xend],x[xend]), c(as.numeric(coef(reg)[i-1]), as.numeric(coef(reg)[i])), col="red", lwd=2)
xstart = j
if(i>1) lines(c(x[xend],x[xstart]), c(as.numeric(coef(reg)[i]), as.numeric(coef(reg)[i])), col="red", lwd=2)
xend = min(j+binlength-1, length(x))
lines(c(x[xstart],x[xend]), rep(as.numeric(coef(reg)[i]), 2), col="red", lwd=2)
j<-j+binlength
}
}
if(opt==1) return(c(R2adj,aic))
}
#show the graphs and RSS of bin smooths model and calculate the MSE
##1 displacement
binsmoothREG(cleandata$displacement, cleandata$mpg)
mse_displacement2 <- 6547.693/length(cleandata$mpg)
##2 horsepower
binsmoothREG(cleandata$horsepower, cleandata$mpg)
mse_horsepower2 <- 7004.712/length(cleandata$mpg)
##3 weight
binsmoothREG(cleandata$weight, cleandata$mpg)
mse_weight2 <- 6530.837/length(cleandata$mpg)
##4 acceleration
binsmoothREG(cleandata$acceleration, cleandata$mpg)
mse_acceleration2 <- 17757.41/length(cleandata$mpg)
#mse of all four attributes
mse_binsmooths <- c(mse_displacement2, mse_horsepower2, mse_weight2, mse_acceleration2)
mse_binsmooths
#####3 b-spline bases
#The function bsplinereg performs a bspline regression with user defined knots
#Input Arguments:
#Input Arguments bsplinereg(...):
# x - vector containing the explonatory variable
# y - vector containing the dependent variable
# knots - number of knots in [0,1]
# ouptut - 1: delivers some output, 0: no output
# opt - 1: returns adj R-squared, 0: returns nothing
# ploto - 1: Create new plot, 0: no new plot
#Calculate basis (rekursiv)
basis <- function(x, degree, i, knots)
{
if(degree == 0)
{ B <- ifelse((x >= knots[i]) & (x < knots[i+1]), 1, 0)
} else {
if((knots[degree+i] - knots[i]) == 0)
{ alpha1 <- 0
} else {
alpha1 <- (x - knots[i])/(knots[degree+i] - knots[i]) }
if((knots[i+degree+1] - knots[i+1]) == 0)
{ alpha2 <- 0
} else { alpha2 <- (knots[i+degree+1] - x)/(knots[i+degree+1] - knots[i+1]) }
B <- alpha1*basis(x, (degree-1), i, knots) + alpha2*basis(x, (degree-1), (i+1), knots)
}
return(B)
}
#Create bspline Desin Matrix
bspline <- function(x, degree, knotpos)
{
#Number of basis
K <- length(knotpos) + degree + 1
#Number of observations
n <- length(x)
#Set Boundary knots
Boundary.knots = c(0,1)
#create new vector with knot positons
knotpos <- c(rep(Boundary.knots[1], (degree+1)), knotpos, rep(Boundary.knots[2], (degree+1)))
#Create design matrix
DM <- matrix(0,n,K)
for(j in 1:K) DM[,j] <- basis(x, degree, j, knotpos)
if(any(x == Boundary.knots[2])) DM[x == Boundary.knots[2], K] <- 1
#Return DM
return(DM)
}
bsplinereg <- function(x, y, knots=0, knotsdef=NULL, degree, output=1, ploto=1, opt=0)
{
#Scale data to [0,1]
x<-x-min(x)
x<-x/max(x)
#Sort x values in ascending order
y <- y[order(x)]
x <- sort(x)
n <- length(x)
#Calculate knot postions
if(knots == 0) knotpos <- NULL
if(knots != 0) knotpos <- 1:knots / (knots+1)
if(length(knotsdef)>0) knotpos <- knotsdef
#Create Design Matrix
DM <- bspline(x, degree, knotpos)
#Perform penalized regression
reg <- lm(y ~ 0 + DM)
print(summary(reg))
#Calculate goodness of fit measures
q <- length(knotpos) + degree + 1
#Residual sum of squares
rss <- sum(sapply(residuals(reg), function(x) { x^2 }))
#Coefficient of determination: R^2
R2 <- 1 - (rss/ (t(y)%*%y-(mean(y)**2*n)))
#Adjusted Coefficient of determination: R^2
R2adj <- 1 - ( (n-1)/(n-q) ) * (1-R2)
#AIC
aic <- AIC(reg)
if(output==1)
{
#Summary output
cat("Number of knots = ", knots, "\n")
cat("Knot positions = ", knotpos, "\n")
cat("RSS: ", rss, "\n")
cat("TSS: ", t(y)%*%y-(mean(y)**2/n), "\n")
cat("R-squared: ", R2, "\n")
cat("Adjusted R-squared: ", R2adj, "\n")
cat("AIC: ", aic , "\n")
#cat("Coefficents: \n")
#print(coef(reg))
#print(summary(reg))
#print(anova(reg))
#Graphics
#Values for prediction
xp <- 0:100/100
DMp <- bspline(xp, degree, knotpos)
if(ploto==1)par(mfrow=c(1,2))
if(ploto==1) matplot(xp, (DMp), type="l", lwd=2, main="Individual spline functions")
if(ploto==1) for(i in 1:length(knotpos)) abline(v=knotpos[i], col="red", lty=2)
if(ploto==1) plot(x,y, main="BSpline Regression", pch=20, col="darkblue")
points(xp,DMp%*%coef(reg), type="l", lwd=2, col="brown")
if(ploto==1) for(i in 1:length(knotpos)) abline(v=knotpos[i], col="red", lty=2)
}
if(opt==1) return(c(R2adj, aic))
}
#plot the relationship of mpg and x variables for suitable knots and degree
plot(cleandata$mpg~cleandata$displacement)
plot(cleandata$mpg~cleandata$horsepower)
plot(cleandata$mpg~cleandata$weight)
plot(cleandata$mpg~cleandata$acceleration)
#knots should be 2
#show the graphs and RSS of b-spline and calculate the MSE
##1 displacement
bsplinereg(cleandata$displacement, cleandata$mpg, degree = 8, knots = 2)
mse_displacement3 <- 6608.903/length(cleandata$mpg)
##2 horsepower
bsplinereg(cleandata$horsepower, cleandata$mpg, degree = 8, knots = 2)
mse_horsepower3 <- 7062.692/length(cleandata$mpg)
##3 weight
bsplinereg(cleandata$weight, cleandata$mpg, degree = 8, knots = 2)
mse_weight3 <- 6669.5/length(cleandata$mpg)
##4 acceleration
bsplinereg(cleandata$acceleration, cleandata$mpg, degree = 5, knots = 2)
mse_acceleration3 <- 18626.84/length(cleandata$mpg)
#mse of all four attributes
mse_bspline <- c(mse_displacement3, mse_horsepower3, mse_weight3, mse_acceleration3)
mse_bspline
| 24805bc6429275b0324fd6e7e64dc5156bc6d799 | [
"R"
] | 1 | R | pearllinyue/ID5059-Assignment1 | 7248e4d0948c33621e47c3497eb7b4066506bbb8 | 0387c42a88d5354af2630d2d2093426878185222 |
refs/heads/master | <file_sep><?php
namespace MoussaClarke;
use \DiffMatchPatch\DiffMatchPatch;
/**
* A simple json flat file/nosql database
*
* 'Collection' and 'document' are used in the MongoDB sense
*
* @author <NAME>
*/
class Planar
{
/**
* The data the collection contains
*
* @var array
*/
protected $data;
/**
* The location of the json database folder
* Can be set either by injecting into construct or over-riding in base extended class
*
* @var string
*/
protected $datafolder = null;
/**
* The name of the collection/model, inject or use the extended class name
*
* @var string
*/
protected $collectionname;
/**
* The location of the json database file
*
* @var string
*/
protected $dbfile;
/**
* Does this collection persist to database?
*
* @var bool
*/
protected $persists = true;
/**
* The schema for the collection
*
* @var array
*/
protected $schema = [];
/**
* Construct the class
* Location of data folder can be injected
* If no collection name injected, the short name of the model class that extends this one will dictate the name of the json file
*
* @param string $datafolder
*
*/
public function __construct($datafolder = null, $collectionname = null)
{
// if $datafolder not set, check the class property in case over-ridden
$datafolder = $datafolder ? $datafolder : $this->datafolder;
// if still not set, throw an exception
if (!$datafolder) {
throw new \Exception('Planar datafolder not set.');
}
// if the folder doesn't exist yet, let's make it
if (!file_exists($datafolder)) {
mkdir($datafolder);
}
// get the collection name via class short name/reflection or injected
$reflect = new \ReflectionClass($this);
if ($reflect->getShortName() != "Planar") {
$this->collectionname = $reflect->getShortName();
} elseif ($collectionname) {
$this->collectionname = $collectionname;
} else {
throw new \Exception('Planar collection name not set.');
}
$this->backupfolder = $datafolder . '/backups';
$this->dbfile = $datafolder . '/' . $this->collectionname . '.json';
if (file_exists($this->dbfile)) {
$this->data = json_decode(file_get_contents($this->dbfile), true);
} else {
$this->data = [];
$this->save();
}
}
/**
* Get the collection schema
*
* @return array
*/
public function getSchema()
{
// return the schema, which might just be a blank array
return $this->schema;
}
/**
* Return an array of documents where property named $key has a particular $value
* Case sensitive, false if nothing found
*
* @param string $key
* @param string $value
* @return array|false
*/
public function find($key, $value)
{
// search the data for the value
$found = [];
foreach ($this->data as $item) {
if ($item[$key] == $value) {
$found[] = $item;
}
}
return empty($found) ? false : $found;
}
/**
* Return the first document where property named $key has a particular value
* Case sensitive, false if nothing found
*
* @param string $key
* @param string $value
* @return array|false
*/
public function first($key, $value)
{
// search the data for the value, break on find
$found = false;
foreach ($this->data as $item) {
if ($item[$key] == $value) {
$found = $item;
break;
}
}
return $found;
}
/**
* Returns the whole collection, sorted by $sortby field
*
* @param string $sortby
* @return array
*/
public function all($sortby = null)
{
// returns the whole collection, sorted
$data = $this->data;
if ($sortby) {
uasort($data, function ($a, $b) use ($sortby) {
return $a[$sortby] > $b[$sortby];
});
}
return $data;
}
/**
* return an array of documents where any property contains $value
* case insensitive
*
* @param string $value
* @return array|false
*/
public function search($value)
{
// use recursive find algo to find all instances
$recursiveFind = function ($needle, $haystack) use (&$recursiveFind) {
if (is_array($haystack)) {
foreach ($haystack as $key => $itemvalue) {
if ($recursiveFind($needle, $itemvalue)) {
return true;
break;
};
}
return false;
} elseif (strpos(strtolower($haystack), strtolower($needle)) !== false) {
return true;
} else {
return false;
}
};
$found = [];
foreach ($this->data as $item) {
foreach ($item as $key => $itemvalue) {
if ($recursiveFind($value, $item[$key])) {
$found[] = $item;
break;
}
}
}
return empty($found) ? false : $found;
}
/**
* Replace or add a document with $properties using specific id
*
* @param string $id
* @param array $properties
* @return string|false
*/
public function set($id, array $properties)
{
// replace or add a document
$oldversion = $this->first('_id', $id);
$properties['_id'] = $id;
if ($oldversion) {
$properties['_created'] = $oldversion['_created'];
$properties['_modified'] = time();
} elseif (!$properties['_created']) {
$properties['_created'] = time();
}
$this->data[$id] = $properties;
$this->save($id);
return $id;
}
/**
* Add a new document to the collection with $properties
* Returns the new $id
*
* @param array $properties
* @return string
*/
public function add(array $properties)
{
// adds a document
$id = uniqid('');
$properties['_id'] = $id;
$properties['_created'] = time();
$this->data[$id] = $properties;
$this->save($id);
return $id;
}
/**
* Delete a document
* Returns boolean depending on success or failure
*
* @param string $id
* @return bool
*/
public function delete($id)
{
// find the document and delete it
if ($this->first('_id', $id)) {
unset($this->data[$id]);
$this->save($id);
return true;
} else {
return false;
}
}
/**
* Get historical version of the document
*
* @param string $id
* @param int $steps
* @return array|false
*/
public function history($id, $steps = 1)
{
$result = false;
$backupdata = $this->getBackupData($id);
if (!empty($backupdata)) {
$version = json_encode($this->data[$id], JSON_PRETTY_PRINT);
$backupdata = $this->getBackupData($id);
$backupdata = array_reverse($backupdata);
$differ = new DiffMatchPatch;
if (count($backupdata) >= $steps) {
for ($i = 0; $i < $steps; $i++) {
$patch = $differ->patch_fromText($backupdata[$i]['diff']);
$version = $differ->patch_apply($patch, $version)[0];
}
$result = json_decode($version, true);
}
}
return $result;
}
/**
* Restore a deleted document
*
* @param string $id
* @return string|false
*/
public function restore($id)
{
// return false if record exists or no backup, i.e. nothing to undelete
if ($this->first($id) || empty($this->getBackupData($id))) {
return false;
}
$properties = $this->history($id);
$properties['_modified'] = time();
$this->data[$id] = $properties;
$this->save($id);
return $id;
}
/**
* Save the collection data to json
*
* @param string $id
*/
protected function save($id=false)
{
if ($id) {
$this->preSaveTasks($id);
}
$jsondata = json_encode($this->data, JSON_PRETTY_PRINT);
file_put_contents($this->dbfile, $jsondata);
}
/**
* Perform tasks that need doing before saving collection data
*
* @param string $id
*/
protected function preSaveTasks($id)
{
if ($this->persists) {
$this->backup($id); // backup persistent models
} else {
$this->garbage(); // garbage collect non-persistent models
}
}
/**
* Clear out non-persistent collections/models
* Deletes all documents that are at least one day old
*/
protected function garbage()
{
// once a day = 86400
$allDocuments = $this->all();
$now = time();
foreach ($allDocuments as $document) {
if ($now - $document['_created'] > 86400) {
$this->delete($document['_id']);
}
}
}
/**
* Make a backup diff of the changed document
*
* @param string $id
*/
protected function backup($id)
{
if (!file_exists($this->backupfolder)) {
mkdir($this->backupfolder);
}
$olddata = json_encode(json_decode(file_get_contents($this->dbfile), true)[$id], JSON_PRETTY_PRINT);
$newdata = json_encode($this->data[$id], JSON_PRETTY_PRINT);
//generate the diff
$timestamp = time();
$differ = new DiffMatchPatch;
$patch = $differ->patch_make($newdata, $olddata);
$result = $differ->patch_toText($patch);
$backupdata = $this->getBackupData($id);
$backupdata[] = ['diff' => $result, 'timestamp' => $timestamp];
$this->writeBackupData($id, $backupdata);
}
/**
* Get the backup data for a specific document
*
* @param string $id
* @return array
*/
protected function getBackupData($id)
{
$backupfile = $this->backupfolder . '/' . $this->collectionname . '_' . $id . '_backup.json';
if (file_exists(($backupfile))) {
$backupdata = json_decode(file_get_contents($backupfile), true);
} else {
$backupdata = [];
}
return $backupdata;
}
/**
* Store backup data for a specific document
*
* @param string $id
* @param array $backupdata
*/
protected function writeBackupData($id, $data)
{
$backupfile = $this->backupfolder . '/' . $this->collectionname . '_' . $id . '_backup.json';
file_put_contents($backupfile, json_encode($data, JSON_PRETTY_PRINT));
}
/**
* Over-write the entire collection
*
* @param array $collection
*/
protected function overwrite(array $collection)
{
//overwrites all data without validating - destructive!
$this->data = $collection;
$this->save();
}
}
<file_sep># Planar
## A super simple flat file json database / model
Throughout this readme, 'document' and 'collection' are used in the MongoDb sense.
Planar is a very basic flat file/nosql json database solution.
Planar is simple, fast, super-flexible, and probably very brittle. It's useful for small projects, where you have a relatively simple data structure, you don't need 1000s of documents and you only have a small amount of users with edit permissions.
It probably won't scale well, there's no collision detection or record locking, and will most likely slow to a crawl once your collections get really large, but I've used it in production apps with 100s of documents and had zero issues so far.
Planar creates json collections on the fly as needed, everything gets json encoded and stored in flat files. It is ORM-like, even though that's pretty much an irrelevant term since this is all json anyway, but you can do CRUD and simple queries on a 'model'-like object.
It backs up every change using diffs to make undos possible.
## Disclaimer
It's still pretty alpha, could break at any time, and I might make backwards incompatible changes without any warning. Don't use it for anything too business critical. You have been warned.
## Install
`composer require moussaclarke\planar`
## Usage
### Instantiation
At its simplest, you just extend the class to create your model/collection. It will use the class name (plural makes sense here) as the json collection name.
```
use MoussaClarke\Planar;
class Widgets extends Planar
{
}
```
You can then instantiate passing in a data folder location. The json and backup diff files will be stored in the folder you specify. If the json file doesn't exist yet, it will be created.
```
$widgets = new Widgets('path/to/data/folder');
```
You can also set a data folder location by over-riding the datafolder property on the class.
```
protected $datafolder='path/to/data/folder';
```
You can then just do the following to instantiate:
```
$widgets = new Widgets;
```
You could set the data folder on a base model class, for example, which the concrete models extend.
Alternatively, if you need things to be slightly less tightly coupled, you can also inject both data folder and collection name without extending the Planar class.
```
$widgets = new Planar('path/to/data/folder', 'Widgets');
```
### Schema
You can add a schema for the document if you like - Planar won't do anything to enforce it, and each document could in fact have completely different elements - but it might be useful elsewhere in your app to get a default instance of the data if you're trying to maintain a more rigid model structure, i.e. as a kind of configuration info. To do this, just over-ride the `$schema` property at the top of your model class.
```
protected $schema = [
'name' => ''
'price' => '',
'weight' => '',
'subwidgets' => [],
];
```
Your initial property defaults would usually be an empty string or array, but you could also specify defaults. You can then grab your schema like this:
```
$schema = $widgets->getSchema();
```
It's up to you to then `add` or `set` it back to the collection once you've loaded the array with data.
If you need to use any runtime values to set defaults then over-ride the `getSchema` method instead.
```
public function getSchema()
{
return [
'name' => ''
'price' => '',
'weight' => '',
'subwidgets' => [],
'invoicedate' => date ('Y-m-d')
];
}
```
### Creating & updating
You can create a document with `add`. Just pass in an array of data (which simply gets json encoded), it will return the unique id of the new document.
```
$data = [
'name' => 'foobar',
'price' => 5,
'weight' => 10,
'subwidget' => ['foo' => 'bar', 'bar' => 'foo']
];
$id = $widgets->add($data);
```
You don't need to worry about adding unique id or timestamp fields, those will be created and updated automatically. `_id` is simply a `uniqid()`, and `_created` and `_modified` are unix timestamps. Those three property names, all prefixed with `_`, are therefore reserved, so try not to have those in your data/schema.
If you know the id, you can replace a whole document with `set`. You can also use this to create a document with a specific id, although Planar won't warn you if it's over-writing anything.
```
$widgets->set('57d1d2fc97aee', $data);
```
### Finding & Searching
Planar has various ways of finding and searching records, although doesn't really support any particularly sophisticated queries. `find` returns an array of documents where the named property has a particular value.
```
$result = $widgets->find('price', 5);
```
`first` returns the first document it finds where the named property has a particular value.
```
$result = $widgets->first('_id', '57d1d2fc97aee');
```
`all` returns the whole collection as an array, so you could perform more complicated queries on that.
```
$result = $widgets->all();
```
You can also sort the `all` results in ascending order by passing in a property name to sort by.
```
$result = $widgets->all('price');
```
`search` allows you to search for a term or phrase throughout the whole collection. It returns an array of documents where any property contains the value, and is case insensitive.
```
$result = $widgets->search('foo');
```
### Deleting, Undoing & Restoring
You can `delete` a document if you know the id.
```
$widgets->delete('57d1d2fc97aee');
```
You can easily retrieve the previous version of a document at the last save point, in order to, for example, perform an undo, as long as the collection is persistent.
```
// get the previous version
$previous = $widgets->history('57d1d2fc97aee');
// undo i.e. set document to previous version
$widgets->set('57d1d2fc97aee', $previous);
```
You can retrieve older versions by specifying the amount of steps to go back.
```
// get the version of the document three saves ago
$historical = $widgets->history('57d1d2fc97aee', 3);
```
While you can retrieve a deleted document with `history`, you can't `set` a document if it's been deleted. However you can `restore` a deleted document to its original id.
```
// delete the document
$widgets->delete('57d1d2fc97aee');
// and undelete it
$widgets->restore('57d1d2fc97aee');
```
### Failing
Most of the methods above will return `false` on fail so you can check if your call was successful.
```
if ($widgets->delete('57d1d2fc97aee')) {
echo 'Document succesfully deleted';
} else {
echo 'Document not found';
}
```
Only the `add` method never returns `false`. As repeatedly mentioned, it will essentially `json_encode` any array you feed it, so doesn't really 'fail' as such, unless you don't send it an array. This has its own risks, so make sure the data you feed it isn't too weird!
### Persistence
You might sometimes want non-persistent collections, for example you might want to instantiate an embedded model to scaffold out your UI, but it doesn't make sense for any data to persist within its own collection since that ultimately gets saved to the parent model/collection. For this use case, just over-ride the class `$persists` property and it will garbage collect once a day.
```
protected $persists = false;
```
### Todo
* ~~Retrieve historical state/undo~~
* ~~Undelete~~
* Errors/Exceptions
* More granular set method, i.e. just one property rather than the whole document.
* Some better query/search options, e.g. Fuzzy search / regex
* ~~Docblocks~~
* Slightly more verbose documentation with Daux.io, with clearer explanations and some longer examples
* Tests
### Maintained
By [<NAME>larke](https://github.com/moussaclarke/)
### Contribute
Feel free to submit bug reports, suggestions and pull requests. Alternatively just fork it and make your own thing.
### License
MIT
| 283f0e4093fd6f493875964e1651dbcea4ae9dbf | [
"Markdown",
"PHP"
] | 2 | PHP | moussaclarke/planar | 77ce0145617d00106311d3aceaa845f19673d27b | 1bd89f2f182278349d42686ce01007654a5154ff |
refs/heads/main | <repo_name>yagmurakinci/KresOtomasyonu<file_sep>/src/Ogretmen/GelisimRaporu.java
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package Ogretmen;
import dosyaislemleri.Dosyaislemleri;
import java.util.Scanner;
/**
*
* @author Administrator
*/
public class GelisimRaporu {
private String isimSoyisim;
private String motorGelisim;
private String sosyalDuygusalGelisim;
private String dilGelisimi;
private String bilisselGelisim;
private String ozbakimBecerileri;
private double boy;
private double kilo;
Scanner klavye=new Scanner(System.in);
Dosyaislemleri dosya=new Dosyaislemleri();
/*?public GelisimRaporu(String isimSoyisim, String motorGelisim, String sosyalDuygusalGelisim, String dilGelisimi, String bilisselGelisim, String ozbakimBecerileri, double boy, double kilo) {
this.isimSoyisim = isimSoyisim;
this.motorGelisim = motorGelisim;
this.sosyalDuygusalGelisim = sosyalDuygusalGelisim;
this.dilGelisimi = dilGelisimi;
this.bilisselGelisim = bilisselGelisim;
this.ozbakimBecerileri = ozbakimBecerileri;
this.boy = boy;
this.kilo = kilo;
}*/
public String getIsimSoyisim() {
return isimSoyisim;
}
public void setIsimSoyisim(String isimSoyisim) {
this.isimSoyisim = isimSoyisim;
}
public String getMotorGelisim() {
return motorGelisim;
}
public void setMotorGelisim(String motorGelisim) {
this.motorGelisim = motorGelisim;
}
public String getSosyalDuygusalGelisim() {
return sosyalDuygusalGelisim;
}
public void setSosyalDuygusalGelisim(String sosyalDuygusalGelisim) {
this.sosyalDuygusalGelisim = sosyalDuygusalGelisim;
}
public String getDilGelisimi() {
return dilGelisimi;
}
public void setDilGelisimi(String dilGelisimi) {
this.dilGelisimi = dilGelisimi;
}
public String getBilisselGelisim() {
return bilisselGelisim;
}
public void setBilisselGelisim(String bilisselGelisim) {
this.bilisselGelisim = bilisselGelisim;
}
public String getOzbakimBecerileri() {
return ozbakimBecerileri;
}
public void setOzbakimBecerileri(String ozbakimBecerileri) {
this.ozbakimBecerileri = ozbakimBecerileri;
}
public double getBoy() {
return boy;
}
public void setBoy(double boy) {
this.boy = boy;
}
public double getKilo() {
return kilo;
}
public void setKilo(double kilo) {
this.kilo = kilo;
}
public void cikti(String isimSoyisim)
{
this.isimSoyisim=isimSoyisim;
System.out.println("---------G E L I S I M R A P O R U----------");
System.out.println("Gelisim bilgilerini ogrenmek istediginiz ogrencinin adini giriniz:");
isimSoyisim=klavye.next();
switch(isimSoyisim)
{
case "Cansu":
//System.out.println("Ogrencinin ismi:" + this.getIsimSoyisim());
this.motorGelisim="iyi";
System.out.println("Ogrencinin Motor Gelisimi:" + this.motorGelisim);
this.sosyalDuygusalGelisim="pekiyi";
System.out.println("Ogrencinin Sosyal ve Duygusal Gelisimi:" + this.sosyalDuygusalGelisim);
this.dilGelisimi="pekiyi";
System.out.println("Ogrencinin Dil Gelisimi:" + this.dilGelisimi);
this.bilisselGelisim="gelistirilmeli";
System.out.println("Ogrencinin Bilissel Gelisimi:" + this.bilisselGelisim);
this.ozbakimBecerileri="pekiyi";
System.out.println("Ogrencinin OzBakim Becerileri:" + this.ozbakimBecerileri);
this.boy=1.30;
System.out.println("Ogrencinin Guncel Boyu:" + this.boy);
this.kilo=35;
System.out.println("Ogrencinin Guncel Kilosu:" + this.kilo);
break;
case "Yagmur":
//System.out.println("Ogrencinin ismi:" + this.getIsimSoyisim());
this.motorGelisim="iyi";
System.out.println("Ogrencinin Motor Gelisimi:" + this.motorGelisim);
this.sosyalDuygusalGelisim="pekiyi";
System.out.println("Ogrencinin Sosyal ve Duygusal Gelisimi:" + this.sosyalDuygusalGelisim);
this.dilGelisimi="pekiyi";
System.out.println("Ogrencinin Dil Gelisimi:" + this.dilGelisimi);
this.bilisselGelisim="gelistirilmeli";
System.out.println("Ogrencinin BiliÅŸsel Gelisimi:" + this.bilisselGelisim);
this.ozbakimBecerileri="pekiyi";
System.out.println("Ogrencinin OzBakim Becerileri:" + this.ozbakimBecerileri);
this.boy=1.30;
System.out.println("Ogrencinin Guncel Boyu:" + this.boy);
this.kilo=35;
System.out.println("Ogrencinin Guncel Kilosu:" + this.kilo);
break;
case "Tugba":
//System.out.println("Ogrencinin ismi:" + this.getIsimSoyisim());
this.motorGelisim="iyi";
System.out.println("Ogrencinin Motor Gelisimi:" + this.motorGelisim);
this.sosyalDuygusalGelisim="kotu";
System.out.println("Ogrencinin Sosyal ve Duygusal Gelisimi:" + this.sosyalDuygusalGelisim);
this.dilGelisimi="kotu";
System.out.println("Ogrencinin Dil Gelisimi:" + this.dilGelisimi);
this.bilisselGelisim="kotu";
System.out.println("Ogrencinin BiliÅŸsel Gelisimi:" + this.bilisselGelisim);
this.ozbakimBecerileri="kotu";
System.out.println("Ogrencinin OzBakim Becerileri:" + this.ozbakimBecerileri);
this.boy=1.10;
System.out.println("Ogrencinin Guncel Boyu:" + this.boy);
this.kilo=50;
System.out.println("Ogrencinin Guncel Kilosu:" + this.kilo);
break;
default:
System.out.println("BOYLE BIR OGRENCIMIZ BULUNMAMAKTADIR...");
break;
}
}
}
<file_sep>/src/dosyaislemleri/Dosyaislemleri.java
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package dosyaislemleri;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.LinkedList;
import java.util.Scanner;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
 * Small text-file helper: create, write and read files whose name is given
 * without the ".txt" extension (the extension is appended automatically).
 */
public class Dosyaislemleri {

    /**
     * Creates "&lt;dosyaadi&gt;.txt" if it does not exist yet and prints the
     * outcome together with the absolute path.
     *
     * @param dosyaadi file name without extension
     */
    public void olustur(String dosyaadi) {
        try {
            File myObj = new File(dosyaadi + ".txt");
            if (myObj.createNewFile()) {
                System.out.println("File created: " + myObj.getName());
                System.out.println("Absolute path: " + myObj.getAbsolutePath());
            } else {
                System.out.println("File already exists.");
                System.out.println("Absolute path: " + myObj.getAbsolutePath());
            }
        } catch (IOException e) {
            System.out.println("An error occurred.");
            e.printStackTrace();
        }
    }

    /**
     * Overwrites "&lt;dosyaAdi&gt;.txt" with the given text.
     *
     * @param s        content to write (replaces any previous content)
     * @param dosyaAdi file name without extension
     */
    public void write(String s, String dosyaAdi) {
        // try-with-resources closes the writer even when write() throws;
        // the previous explicit close() leaked the handle on failure.
        try (FileWriter write = new FileWriter(dosyaAdi + ".txt")) {
            write.write(s);
            System.out.println("SUCCESFULLY... ");
        } catch (IOException e) {
            System.out.println(e);
            e.printStackTrace();
        }
    }

    /**
     * Prints every line of "&lt;dosyaadi&gt;.txt" to stdout.
     *
     * @param dosyaadi file name without extension
     */
    public void read(String dosyaadi) {
        File myObj = new File(dosyaadi + ".txt");
        // Scanner is AutoCloseable; close it even if reading fails mid-way.
        try (Scanner myReader = new Scanner(myObj)) {
            while (myReader.hasNextLine()) {
                String data = myReader.nextLine();
                System.out.println(data);
            }
        } catch (FileNotFoundException e) {
            System.out.println("An error occurred.");
            e.printStackTrace();
        }
    }

    // The three overloads below are IDE-generated stubs that were never implemented.

    public void olustur(LinkedList<String> anaYemek) {
        throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
    }

    public void read(LinkedList<String> anaYemek) {
        throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
    }

    public void write(String yemekler) {
        throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
    }
}<file_sep>/src/Arayuz/son/YoneticiGirisController.java
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package Arayuz.son;
import java.io.IOException;
import java.net.URL;
import java.util.ResourceBundle;
import javafx.event.ActionEvent;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.fxml.Initializable;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.scene.control.Button;
import javafx.scene.control.PasswordField;
import javafx.scene.control.TextField;
import javafx.scene.input.MouseEvent;
import javafx.stage.Stage;
/**
* FXML Controller class
*
* @author grung
*/
/**
 * FXML Controller class
 *
 * Login screen for the administrator. On a successful login the window is
 * switched to the student-registration view; otherwise a warning dialog is
 * shown. Credentials are currently hard coded in source.
 *
 * @author grung
 */
public class YoneticiGirisController implements Initializable {

    @FXML
    private TextField kullaniciadi;   // user-name input
    @FXML
    private PasswordField sifre;      // password input
    @FXML
    private Button giris;             // "log in" button
    @FXML
    private Button anasayfa1;         // "back to home" button

    /**
     * Initializes the controller class.
     */
    @Override
    public void initialize(URL url, ResourceBundle rb) {
        // TODO
    }

    @FXML
    private void kullaniciadi(ActionEvent event) {
    }

    @FXML
    private void sifre(ActionEvent event) {
    }

    /** Validates the credentials and, if correct, opens the registration screen. */
    @FXML
    void girisyap(MouseEvent event) throws IOException {
        // NOTE(review): hard-coded credentials; move to a secure store.
        boolean girisGecerli = this.kullaniciadi.getText().equals("Admin")
                && this.sifre.getText().equals("password");
        if (!girisGecerli) {
            Conformbox uyari = new Conformbox();
            uyari.display("Uyarı", "Hatalı Kullanıcı Adı Veya Şifre");
            return;
        }
        Stage pencere = (Stage) giris.getScene().getWindow();
        Parent icerik = FXMLLoader.load(getClass().getResource("OgrenciKayit.fxml"));
        pencere.setScene(new Scene(icerik));
        pencere.show();
    }

    /** Switches the current window back to the main (home) scene. */
    @FXML
    private void anasayfayadon(MouseEvent event) throws IOException {
        Stage pencere = (Stage) anasayfa1.getScene().getWindow();
        Parent icerik = FXMLLoader.load(getClass().getResource("FXMLDocument.fxml"));
        pencere.setScene(new Scene(icerik));
        pencere.show();
    }
}
<file_sep>/src/Arayuz/son/IlacTakipRaporuController.java
package Arayuz.son;
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.ResourceBundle;
import javafx.event.ActionEvent;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.fxml.Initializable;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.scene.control.Button;
import javafx.scene.control.ListView;
import javafx.scene.control.TableColumn;
import javafx.scene.control.TableView;
import javafx.scene.control.TextField;
import javafx.scene.input.MouseEvent;
import javafx.stage.Stage;
/**
* FXML Controller class
*
* @author grung
*/
/**
 * FXML Controller class
 *
 * Medication-tracking screen: each row in {@code tablo} is one line of
 * "ilac.txt" (student name, medicine name, time). Rows are added, removed
 * and updated through the {@code ogrenciler} helper class.
 *
 * @author grung
 */
public class IlacTakipRaporuController implements Initializable {

    @FXML
    private TextField adSoyad_gir;   // student name input
    @FXML
    private TextField ilacAd_gir;    // medicine name input
    @FXML
    private TextField ilacSaat_gir;  // medicine time input
    @FXML
    private Button ekle;
    @FXML
    private Button sil;
    @FXML
    private ListView<String> tablo;  // current contents of ilac.txt
    @FXML
    private Button guncelle4;
    @FXML
    private Button once;

    /**
     * Initializes the controller class.
     */
    @Override
    public void initialize(URL url, ResourceBundle rb) {
        // TODO
    }

    /** Saves the form fields as a new medication record and refreshes the list. */
    @FXML
    void ekle(ActionEvent event) {
        try {
            ogr().ogrenciIlacKaydet();
            Guncelle(event);
        } catch (Exception e) {
            // Was silently swallowed; at least surface the failure.
            e.printStackTrace();
        }
    }

    /** Builds an {@code ogrenciler} instance from the current form fields. */
    private ogrenciler ogr() {
        ogrenciler ogr = new ogrenciler();
        ogr.setAdsoyadilac(adSoyad_gir.getText());
        ogr.setIlacAd_gir(ilacAd_gir.getText());
        ogr.setIlacSaat_gir(ilacSaat_gir.getText());
        return ogr;
    }

    /** Deletes the selected row from "ilac.txt" and refreshes the list. */
    @FXML
    void sil(ActionEvent event) {
        ogrenciler ogr = new ogrenciler();
        ogr.ogrenciSil(sec(), "ilac.txt");
        Guncelle(event);
        // Message restored to match the sibling GelisimRaporuController
        // (the literal had been clobbered by an anonymization placeholder).
        System.out.println("Seçilen Silindi");
    }

    /**
     * Applies the form fields to the selected row (if any), then reloads the
     * whole list view from "ilac.txt".
     */
    @FXML
    void Guncelle(ActionEvent event) {
        try {
            ogr().ogrenciIlacGuncelle(tablo.getSelectionModel().getSelectedItem().toString());
        } catch (Exception e) {
            // No selection (NPE) is expected when merely refreshing; log and go on.
            System.err.println(e.getMessage());
        }
        tablo.getItems().clear();
        Dosya od = new Dosya();
        ArrayList<String> okunan = od.OgrDosyaOkuma("ilac.txt");
        for (String satir : okunan) {
            tablo.getItems().add(satir);
        }
    }

    /** @return the currently selected row, or {@code null} if none. */
    private String sec() {
        String sec = tablo.getSelectionModel().getSelectedItem();
        return sec;
    }

    @FXML
    void adSoyad(ActionEvent event) {
    }

    @FXML
    void IlacAd(ActionEvent event) {
    }

    @FXML
    void IlacSaat(ActionEvent event) {
    }

    /** Returns to the teacher home screen. */
    @FXML
    void oncekinedon(MouseEvent event) throws IOException {
        Stage s = (Stage) once.getScene().getWindow();
        Parent root = FXMLLoader.load(getClass().getResource("OgretmenAnasayfa.fxml"));
        Scene scene = new Scene(root);
        s.setScene(scene);
        s.show();
    }
}
<file_sep>/src/Veli/YemekMenusu.java
package Veli;
import dosyaislemleri.Dosyaislemleri;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.Random;
/**
 * Builds a random weekly lunch menu. Candidate dishes are read from five
 * text files (main course, dessert, drink, soup, pastry); for each weekday
 * one random item of every category is picked, the resulting line is
 * written to a "yemekMenusu&lt;Gun&gt;" file and the files are printed.
 */
public class YemekMenusu {

    DosyaOkuma d = new DosyaOkuma();
    Dosyaislemleri dosya = new Dosyaislemleri();
    private LinkedList<String> anaYemek, tatli, mesrubat, corba, hamurisi;
    public int a;            // main-course count (public: kept for legacy callers)
    private int m, t, c, h;  // drink / dessert / soup / pastry counts
    private String yemekler;

    /** Loads all five dish lists from disk, then generates the menu. */
    public void cikti0() {
        // NOTE(review): absolute Windows paths — assumes the dev-machine layout.
        anaYemek = d.oku("C:\\Users\\Administrator\\Desktop\\asd\\Anayemek.txt");
        tatli = d.oku("C:\\Users\\Administrator\\Desktop\\asd\\tatli.txt");
        mesrubat = d.oku("C:\\Users\\Administrator\\Desktop\\asd\\mesrubat.txt");
        corba = d.oku("C:\\Users\\Administrator\\Desktop\\asd\\corba.txt");
        hamurisi = d.oku("C:\\Users\\Administrator\\Desktop\\asd\\hamurisi.txt");
        cikti();
    }

    /** Overload: derives the list sizes and delegates to the 5-arg version. */
    public void cikti() {
        a = anaYemek.size();
        t = tatli.size();
        c = corba.size();
        h = hamurisi.size();
        m = mesrubat.size();
        // BUGFIX: arguments were previously passed as (a, c, t, m, h) into
        // the (a, m, t, c, h) parameter list, swapping the soup and drink
        // counts and risking IndexOutOfBoundsException when sizes differ.
        cikti(this.a, this.m, this.t, this.c, this.h);
    }

    /**
     * Writes one random menu line per weekday, then prints all five files.
     *
     * @param a main-course count
     * @param m drink count
     * @param t dessert count
     * @param c soup count
     * @param h pastry count
     */
    public void cikti(int a, int m, int t, int c, int h) {
        gunYaz("Pazartesi:", "yemekMenusuPazartesi", a, m, t, c, h);
        gunYaz("Sali:", "yemekMenusuSali", a, m, t, c, h);
        gunYaz("Carsamba:", "yemekMenusuCarsamba", a, m, t, c, h);
        gunYaz("Persembe:", "yemekMenusuPersembe", a, m, t, c, h);
        gunYaz("Cuma:", "yemekMenusuCuma", a, m, t, c, h);
        // Banner reconstructed; the literal had been clobbered by an
        // anonymization placeholder ("Y E M E K M <NAME> U").
        System.out.println("--------------Y E M E K M E N U S U-----------------");
        dosya.read("yemekMenusuPazartesi");
        dosya.read("yemekMenusuSali");
        dosya.read("yemekMenusuCarsamba");
        dosya.read("yemekMenusuPersembe");
        dosya.read("yemekMenusuCuma");
    }

    /**
     * Creates the day file and writes one randomly assembled menu line
     * (the exact duplicate of this code existed five times before).
     */
    private void gunYaz(String etiket, String dosyaAdi, int a, int m, int t, int c, int h) {
        dosya.olustur(dosyaAdi);
        dosya.write(etiket + this.anaYemek.get(random(a)) + "-" + this.corba.get(random(c))
                + "-" + this.tatli.get(random(t)) + "-" + this.mesrubat.get(random(m))
                + "-" + this.hamurisi.get(random(h)), dosyaAdi);
    }

    /** @return a uniform random index in [0, temp). */
    public int random(int temp) {
        Random r = new Random();
        return r.nextInt(temp);
    }

    public void yemekler() {
    }

    public ArrayList<String> yemekListesi() {
        throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
    }
}
<file_sep>/src/Ogretmen/UykuRaporu.java
package Ogretmen;
import java.util.Scanner;
/**
 * Sleep report for a student. Asks for a student name on stdin and prints
 * that student's nap times. All three known students currently share the
 * same hard-coded values, so their cases fall through to a single branch.
 */
public class UykuRaporu {

    private String isimSoyisim;   // student full name
    private double uykuSaati;     // fall-asleep time (hh.mm encoded as a double)
    private double uyanmaSaati;   // wake-up time (hh.mm encoded as a double)
    private double toplamUyku;    // total sleep in hours

    Scanner klavye = new Scanner(System.in);

    public String getIsimSoyisim() {
        return isimSoyisim;
    }

    public void setIsimSoyisim(String isimSoyisim) {
        this.isimSoyisim = isimSoyisim;
    }

    public double getuykuSaati() {
        return uykuSaati;
    }

    public void setuykuSaati(double uykuSaati) {
        this.uykuSaati = uykuSaati;
    }

    public double getuyanmaSaati() {
        return uyanmaSaati;
    }

    public void setuyanmaSaati(double uyanmaSaati) {
        this.uyanmaSaati = uyanmaSaati;
    }

    public double gettoplamUyku() {
        return toplamUyku;
    }

    public void settoplamUyku(double toplamUyku) {
        this.toplamUyku = toplamUyku;
    }

    /**
     * Prompts for a student name on stdin and prints the sleep report.
     *
     * @param isimSoyisim initial name; stored, then immediately superseded by
     *                    the value read from stdin (kept for interface
     *                    compatibility)
     */
    public void cikti(String isimSoyisim) {
        System.out.println("------- U Y K U R A P O R U-------");
        this.isimSoyisim = isimSoyisim;
        System.out.println("Uyku bilgilerini ogrenmek istediginiz ogrencinin adini giriniz:");
        isimSoyisim = klavye.next();
        switch (isimSoyisim) {
            // The three cases were byte-identical copies; merged into one
            // fall-through branch to remove the duplication.
            case "Cansu":
            case "Yagmur":
            case "Tugba":
                this.uykuSaati = 13.15;
                System.out.println("Ogrencinin uyku saati:" + this.uykuSaati);
                this.uyanmaSaati = 14.45;
                System.out.println("Ogrencinin uyanma saati:" + this.uyanmaSaati);
                this.toplamUyku = 2;
                System.out.println("Ogrencinin toplam uyku saati:" + this.toplamUyku);
                break;
            default:
                System.out.println("BOYLE BIR OGRENCIMIZ BULUNMAMAKTADIR...");
                break;
        }
    }
}
<file_sep>/src/Arayuz/son/Dosya.java
package Arayuz.son;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.InputStreamReader;
import java.util.ArrayList;
/**
 * Line-oriented text-file store used by the student screens. Every record
 * is one line; records are appended, listed, deleted by substring match and
 * updated by substring match.
 */
public class Dosya {

    // Methods below were created for the student-registration screens.

    /**
     * Appends one record (line) to the given file, creating it if needed,
     * and echoes the written line to stdout.
     */
    void DosyaKaydet(String a, String dosya) {
        // try-with-resources guarantees the writer is closed on failure.
        try (BufferedWriter writer = new BufferedWriter(new FileWriter(dosya, true))) {
            writer.write(a);
            writer.newLine();
            System.out.println(a);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Reads all lines of the given file, prints them and returns them.
     *
     * @return the file's lines in order; empty list if the file cannot be read
     */
    public ArrayList<String> OgrDosyaOkuma(String dosya) {
        ArrayList<String> veri = oku(dosya);
        for (String satir : veri) {
            System.out.println(satir);
        }
        return veri;
    }

    /**
     * Deletes every line containing {@code o} and rewrites the file.
     *
     * BUGFIX: the old loop called remove(j) while iterating forward without
     * adjusting j, so the line after each removed line was skipped and
     * consecutive matches survived. Iterating backwards examines every
     * element exactly once.
     */
    public void OgrSil(String o, String dosya) {
        ArrayList<String> veri = oku(dosya);
        for (int j = veri.size() - 1; j >= 0; j--) {
            if (veri.get(j).contains(o)) {
                veri.remove(j);
            }
        }
        yaz(veri, dosya);
    }

    /**
     * Replaces every line containing {@code a} with {@code b} and rewrites
     * the file.
     *
     * BUGFIX: same skipped-index problem as OgrSil; the replacement is now
     * done in place with set(), which also keeps the record at its original
     * position instead of moving it to the end of the file.
     */
    public void OgrGuncelle(String a, String b, String dosya) {
        ArrayList<String> veri = oku(dosya);
        for (int j = 0; j < veri.size(); j++) {
            if (veri.get(j).contains(a)) {
                veri.set(j, b);
            }
        }
        yaz(veri, dosya);
    }

    /** Reads all lines of {@code dosya}; empty list on failure. */
    private ArrayList<String> oku(String dosya) {
        ArrayList<String> veri = new ArrayList<>();
        // The old code wrapped the stream in a pointless DataInputStream and
        // leaked it on failure; try-with-resources closes everything.
        try (BufferedReader bReader = new BufferedReader(
                new InputStreamReader(new FileInputStream(dosya)))) {
            String str;
            while ((str = bReader.readLine()) != null) {
                veri.add(str);
            }
        } catch (Exception e) {
            System.err.println("Hata: " + e.getMessage());
        }
        return veri;
    }

    /** Overwrites {@code dosya} with the given lines, echoing each to stdout. */
    private void yaz(ArrayList<String> veri, String dosya) {
        try (BufferedWriter writer = new BufferedWriter(new FileWriter(dosya))) {
            for (String satir : veri) {
                writer.write(satir);
                writer.newLine();
                System.out.println(satir);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // A large commented-out block duplicating these four methods for
    // "genel.txt", "okulbilgi.txt", "fiyat.txt" and "izinonay.txt" was
    // removed as dead code; the methods above take the file name as an
    // explicit argument and cover all of those cases.
}<file_sep>/src/Arayuz/son/GenelBilgilendirmeController.java
package Arayuz.son;
import java.io.IOException;
import java.net.URL;
import java.util.ResourceBundle;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.fxml.Initializable;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.scene.control.Button;
import javafx.scene.input.MouseEvent;
import javafx.stage.Stage;
/** Controller for the general-information screen; it only navigates back home. */
public class GenelBilgilendirmeController implements Initializable {

    @FXML
    private Button anasayfa;  // "back to home" button

    @Override
    public void initialize(URL url, ResourceBundle rb) {
        // TODO
    }

    /** Switches the current window back to the main (home) scene. */
    @FXML
    void anasayfayadon(MouseEvent event) throws IOException {
        Stage pencere = (Stage) anasayfa.getScene().getWindow();
        Parent icerik = FXMLLoader.load(getClass().getResource("FXMLDocument.fxml"));
        pencere.setScene(new Scene(icerik));
        pencere.show();
    }
}
<file_sep>/src/Ogretmen/IlacTakipRaporu.java
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package Ogretmen;
import dosyaislemleri.Dosyaislemleri;
import java.util.Scanner;
/**
 * Console medication-tracking report. Asks for a student name on stdin,
 * prints the hard-coded medicine plan for that student and finally dumps
 * the "IlacTakipRaporu" file (created empty here if missing).
 */
public class IlacTakipRaporu {

    private String isimSoyisim;  // student full name
    private String ilacIsim;     // medicine name
    private double ilacZamani;   // time the medicine must be taken (hh.mm)

    Scanner klavye = new Scanner(System.in);
    Dosyaislemleri dosya = new Dosyaislemleri();

    public String getIsimSoyisim() {
        return isimSoyisim;
    }

    public void setIsimSoyisim(String isimSoyisim) {
        this.isimSoyisim = isimSoyisim;
    }

    public String getIlacIsim() {
        return ilacIsim;
    }

    public void setIlacIsim(String ilacIsim) {
        this.ilacIsim = ilacIsim;
    }

    public double getilacZamani() {
        return ilacZamani;
    }

    public void setilacZamani(double ilacZamani) {
        this.ilacZamani = ilacZamani;
    }

    /**
     * Prompts for a student name on stdin and prints the medication plan.
     *
     * @param isimSoyisim initial name; stored, then superseded by the value
     *                    read from stdin (kept for interface compatibility)
     */
    public void cikti(String isimSoyisim) {
        this.isimSoyisim = isimSoyisim;
        System.out.println("-------I L A C T A K I P R A P O R U-------");
        System.out.println("Ilac Takip bilgilerini ogrenmek istediginiz ogrencinin adini giriniz:");
        isimSoyisim = klavye.next();
        switch (isimSoyisim) {
            case "Cansu":
                yazdir("xanax", 13.00);
                break;
            // Yagmur and Tugba had byte-identical bodies; merged via fall-through.
            case "Yagmur":
            case "Tugba":
                yazdir("xanax", 13.30);
                break;
            default:
                System.out.println("BOYLE BIR OGRENCIMIZ BULUNMAMAKTADIR...");
                break;
        }
        // NOTE(review): this file is only created and read here, never
        // written, so the dump below is empty on first run — confirm intent.
        dosya.olustur("IlacTakipRaporu");
        System.out.println("Ilac Takip Raporu");
        dosya.read("IlacTakipRaporu");
    }

    /** Stores one medicine/time pair on the fields and prints both values. */
    private void yazdir(String ilac, double zaman) {
        this.ilacIsim = ilac;
        System.out.println("Ogrencinin kullanmasi gereken ilac:" + this.ilacIsim);
        this.ilacZamani = zaman;
        System.out.println("Ogrencinin ilaci kullanmasi gereken saat:" + this.ilacZamani);
    }
}
<file_sep>/src/Yonetici/OgrenciKayit.java
package Yonetici;
import dosyaislemleri.Dosyaislemleri;
import java.util.Scanner;
/**
 * Console registration of a new student: reads name, surname, birth year
 * and gender from stdin, stores the record in "yenikayit.txt" and echoes
 * it back.
 */
public class OgrenciKayit {

    private String isim;      // first name
    private String soyisim;   // surname
    private String cinsiyet;  // gender
    private int dogumyili;    // birth year

    Dosyaislemleri dosya = new Dosyaislemleri();
    Scanner klavye = new Scanner(System.in);

    public String getIsim() {
        return isim;
    }

    public void setIsim(String isim) {
        this.isim = isim;
    }

    public String getSoyisim() {
        return soyisim;
    }

    public void setSoyisim(String soyisim) {
        this.soyisim = soyisim;
    }

    public String getCinsiyet() {
        return cinsiyet;
    }

    public void setCinsiyet(String cinsiyet) {
        this.cinsiyet = cinsiyet;
    }

    public int getDogumyili() {
        return dogumyili;
    }

    public void setDogumyili(int dogumyili) {
        this.dogumyili = dogumyili;
    }

    /** Interactively collects the student's data and persists the record. */
    public void ogrencibilgileri() {
        System.out.println("------O G R E N C I K A Y I T---------");
        System.out.println("Lutfen kaydedilecek ogrencinin adini giriniz:");
        this.isim = klavye.next();
        System.out.println("Lutfen kaydedilecek ogrencinin soyadini giriniz");
        this.soyisim = klavye.next();
        System.out.println("Lutfen kaydedilecek ogrencinin dogum yilini giriniz");
        this.dogumyili = klavye.nextInt();
        System.out.println("Lutfen kaydedilecek ogrencinin cinsiyetini giriniz");
        this.cinsiyet = klavye.next();

        dosya.olustur("yenikayit");
        StringBuilder kayit = new StringBuilder();
        kayit.append("Son Kaydedilen Ogrencinin Adi:").append(getIsim());
        kayit.append(" Son Kaydedilen Ogrencinin Soyadi:").append(getSoyisim());
        kayit.append(" Son Kaydedilen Ogrencinin Cinsiyeti:").append(getCinsiyet());
        kayit.append(" Son Kaydedilen Ogrencinin Dogum Yili:").append(getDogumyili());
        dosya.write(kayit.toString(), "yenikayit");
        dosya.read("yenikayit");
    }
}
<file_sep>/src/Arayuz/son/GelisimRaporuController.java
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package Arayuz.son;
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.ResourceBundle;
import javafx.event.ActionEvent;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.fxml.Initializable;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.scene.control.Button;
import javafx.scene.control.ListView;
import javafx.scene.control.TableColumn;
import javafx.scene.control.TableView;
import javafx.scene.control.TextArea;
import javafx.scene.control.TextField;
import javafx.scene.input.MouseEvent;
import javafx.stage.Stage;
/**
* FXML Controller class
*
* @author grung
*/
/**
 * FXML Controller class
 *
 * Development-report screen: each row in {@code tablo} is one line of
 * "gelisim.txt" (student name plus the five development areas). Records
 * are added, deleted and updated through the {@code ogrenciler} helper.
 *
 * @author grung
 */
public class GelisimRaporuController implements Initializable {

    @FXML
    private Button ekle;
    @FXML
    private TextArea motorGelisim;           // motor development notes
    @FXML
    private TextArea sosyalDuygusalGelisim;  // social/emotional notes
    @FXML
    private TextArea ozbakimGelisim;         // self-care notes
    @FXML
    private TextArea dilGelisim;             // language notes
    @FXML
    private TextField adSoyad_gir;           // student name input
    @FXML
    private TextArea bilisselGelisim;        // cognitive notes
    @FXML
    private ListView<String> tablo;          // current contents of gelisim.txt
    @FXML
    private Button sil;
    @FXML
    private Button guncelle3;
    @FXML
    private Button once;

    /**
     * Initializes the controller class.
     */
    @Override
    public void initialize(URL url, ResourceBundle rb) {
        // TODO
    }

    /** Saves the form fields as a new development record and refreshes the list. */
    @FXML
    void ekle(ActionEvent event) {
        try {
            // Was a copy-paste of ogr(); reuse the builder the same way the
            // sibling IlacTakipRaporuController does.
            ogr().ogrenciGelisimKaydet();
            Guncelle(event);
        } catch (Exception e) {
            // Was silently swallowed; at least surface the failure.
            e.printStackTrace();
        }
    }

    @FXML
    void motorGelisim(MouseEvent event) {
    }

    @FXML
    void sosyalDuygusalGelisim(MouseEvent event) {
    }

    @FXML
    void ozbakimGelisim(MouseEvent event) {
    }

    @FXML
    void dilGelisim(MouseEvent event) {
    }

    @FXML
    void adSoyad_gir(ActionEvent event) {
    }

    @FXML
    void bilisselGelisim(MouseEvent event) {
    }

    /** Builds an {@code ogrenciler} instance from the current form fields. */
    private ogrenciler ogr() {
        ogrenciler ogr = new ogrenciler();
        ogr.setAdsoyadgelisim(adSoyad_gir.getText());
        ogr.setMotorGelisim(motorGelisim.getText());
        ogr.setSosyalDuygusalGelisim(sosyalDuygusalGelisim.getText());
        ogr.setOzbakimGelisim(ozbakimGelisim.getText());
        ogr.setDilGelisim(dilGelisim.getText());
        ogr.setBilisselGelisim(bilisselGelisim.getText());
        return ogr;
    }

    /** Deletes the selected row from "gelisim.txt" and refreshes the list. */
    @FXML
    void sil(ActionEvent event) {
        ogrenciler ogr = new ogrenciler();
        String ogrSil = tablo.getSelectionModel().getSelectedItem();
        ogr.ogrenciSil(ogrSil, "gelisim.txt");
        Guncelle(event);
        System.out.println("Seçilen Silindi");
    }

    /**
     * Applies the form fields to the selected row (if any), then reloads the
     * whole list view from "gelisim.txt".
     */
    @FXML
    void Guncelle(ActionEvent event) {
        try {
            ogr().ogrenciGelisimGuncelle(tablo.getSelectionModel().getSelectedItem().toString());
        } catch (Exception e) {
            // No selection (NPE) is expected when merely refreshing; log and go on.
            System.err.println(e.getMessage());
        }
        tablo.getItems().clear();
        Dosya od = new Dosya();
        ArrayList<String> okunan = od.OgrDosyaOkuma("gelisim.txt");
        for (String satir : okunan) {
            tablo.getItems().add(satir);
        }
    }

    /** Returns to the teacher home screen. */
    @FXML
    void oncekinedon(MouseEvent event) throws IOException {
        Stage s = (Stage) once.getScene().getWindow();
        Parent root = FXMLLoader.load(getClass().getResource("OgretmenAnasayfa.fxml"));
        Scene scene = new Scene(root);
        s.setScene(scene);
        s.show();
    }
}
| 02ef18792c05827aa85cc32df86f0f5e99c4f27c | [
"Java"
] | 11 | Java | yagmurakinci/KresOtomasyonu | 020fcaa00a82cd47b1ee51963b8d46308c44341c | b328f12529d090b0c1280dba47577a8843b0e4a9 |
refs/heads/master | <file_sep># How to use
1. Install project tools: `npm install -g parcel-bundler typescript`
2. Install project dependencies: `npm install`
3. Start game server: `parcel serve src/index.html --open`
4. Build game: `parcel build src/index.html`
<file_sep>import {Game} from 'phaser'
// Entry point: boot a Phaser game with a fixed 1200x700 canvas.
// All other configuration (scenes, renderer, physics) is left at
// Phaser's defaults.
new Game({
    width: 1200,
    height: 700
})
| d21ad5772bbd620456a74533f6f429e0769e1401 | [
"Markdown",
"TypeScript"
] | 2 | Markdown | awulkan/phaser-3-boilerplate | 45248134a0aff9a11c69e0db13f90488bc9698ec | 1655de47df78cbcaa8606bcc8887087de4141b09 |
refs/heads/master | <file_sep>//coded by <NAME>
$(document).ready(function() {
    // "Random" button: open a random Wikipedia article in a new tab/window.
    $('#random').click(function() {
        var randomSearch = "https://en.wikipedia.org/wiki/Special:Random";
        window.open(randomSearch);
    });
    // "Help" button: open the local help page.
    $('#help').click(function() {
        var helpPage = "help.html";
        window.open(helpPage);
    });
    // Search button: query the Wikipedia search API (JSONP callback avoids
    // CORS restrictions) and render one result div per hit.
    $("#searchButton").click(function() {
        var q = $("#searchbox").val();
        $.getJSON("https://en.wikipedia.org/w/api.php?callback=?", {
            srsearch: q,
            action: "query",
            list: "search",
            format: "json"
        },
        function(data) {
            // Clear previous results before rendering the new ones.
            $("#results").empty();
            $.each(data.query.search, function(i, item) {
                // NOTE(review): item.title / item.snippet are injected as raw
                // HTML; the snippet intentionally carries Wikipedia's
                // search-highlight markup.
                $("#results").append("<div id='resultDiv' class='center'>" + item.title + " " + item.snippet + " ...<a href='http://en.wikipedia.org/wiki/" + encodeURIComponent(item.title) + " '> more</a>" + "</div>");
            });
        });
    });
}); | 8f02dea0787f9fad1b45ca2b678e0de5c86597c7 | [
"JavaScript"
] | 1 | JavaScript | anapandey/WikipediaSearchApp | 1a664bf3313f7ffacdc8341f62be4a8ab49e9fe3 | e3424110545631e07b72cce5b56ca97fd24073a8 |
refs/heads/master | <file_sep>let columns = 15;
// Board dimensions and mine count for the minesweeper grid
// (`columns` is declared on the preceding line).
let rows = 15;
let numberOfItems = rows * columns; // total number of cells on the board
let mines = 30;
let grid = []; // flat cell array: "0" = empty cell, "X" = mine
// Container element the generated cells are appended to.
let gameBoard = document.getElementById("littleBlanket");
// Fill the global `grid` with one "0" (empty) cell per board position and
// return it.
// Fix: the original did `grid.push("0")[i]` — Array.prototype.push returns
// the new length (a number), so subscripting that return value was a
// meaningless dead read. The subscript is removed; behavior is otherwise
// unchanged (numberOfItems === rows * columns).
const generateGrid = () => {
  for (let i = 0; i < numberOfItems; i++) {
    grid.push("0");
  }
  return grid;
};
generateGrid();
console.log(grid);
// Uniformly random cell index in [0, numberOfItems).
const generateRandomIndex = () => Math.floor(numberOfItems * Math.random());
// True when the given cell already holds a mine ("X").
const alreadyMine = (square) => square === "X";
// Place exactly `mines` mines at distinct random positions in the grid:
// keep drawing random indices, skipping cells that are already mines,
// until the required count has been placed.
const addMines = () => {
  let placed = 0;
  while (placed < mines) {
    const idx = generateRandomIndex();
    if (!alreadyMine(grid[idx])) {
      grid.splice(idx, 1, "X");
      placed += 1;
    }
  }
};
addMines();
console.log(grid);
const generateHTML = () => {
for (let i = 0; i < numberOfItems; i++) {
let field = grid[i];
field = document.createElement("th");
gameBoard.appendChild(field);
}
};
| 8dedd38f402bf8d0ae991cf289de3b3b9df2109e | [
"JavaScript"
] | 1 | JavaScript | sambuckley1995/Minesweeperish | 25824f787959fdff54e8cbae0262f77afbc96949 | 56475bc98f5c4215ef30a60ec43d17d325e31016 |
refs/heads/master | <file_sep><?php
/*-----------------------------------------------------------------------------------*/
/*
/* Button Config
/*
/*-----------------------------------------------------------------------------------*/
// Each $visual_shortcodes entry drives one TinyMCE shortcode-generator popup:
// 'params' describes the form fields shown to the editor and 'shortcode' is
// the template whose {{placeholders}} are filled with the entered values.
// ('no_preview' => true disables the live preview pane in the popup.)

// [visual_button] — a link styled as a themed button.
$visual_shortcodes['button'] = array(
    'no_preview' => true,
    'params' => array(
        'url' => array(
            'std' => '',
            'type' => 'text',
            'label' => __('Button URL', 'framework'),
            'desc' => __('Add the button\'s url eg http://example.com', 'framework')
        ),
        'style' => array(
            'type' => 'select',
            'label' => __('Button Style', 'framework'),
            'desc' => __('Select the button\'s style, ie the button\'s colour', 'framework'),
            'options' => array(
                'default' => 'Default (set by theme)',
                'grey' => 'Grey',
                'green' => 'Green',
                'light-blue' => 'Light Blue',
                'blue' => 'Blue',
                'red' => 'Red',
                'orange' => 'Orange',
                'purple' => 'Purple',
                'black' => 'Black',
                'white' => 'White',
            )
        ),
        'size' => array(
            'type' => 'select',
            'label' => __('Button Size', 'framework'),
            'desc' => __('Select the button\'s size', 'framework'),
            'options' => array(
                'small' => 'Small',
                'medium' => 'Medium',
                'large' => 'Large'
            )
        ),
        'target' => array(
            'type' => 'select',
            'label' => __('Button Target', 'framework'),
            'desc' => __('_self = open in same window. _blank = open in new window', 'framework'),
            'options' => array(
                '_self' => '_self',
                '_blank' => '_blank'
            )
        ),
        'content' => array(
            'std' => 'Button Text',
            'type' => 'text',
            'label' => __('Button\'s Text', 'framework'),
            'desc' => __('Add the button\'s text', 'framework'),
        )
    ),
    'shortcode' => '[visual_button url="{{url}}" style="{{style}}" size="{{size}}" target="{{target}}"] {{content}} [/visual_button]',
    'popup_title' => __('Insert Button Shortcode', 'framework')
);

/*-----------------------------------------------------------------------------------*/
/*
/* Alert Config
/*
/*-----------------------------------------------------------------------------------*/

// [visual_alert] — a coloured notice box.
$visual_shortcodes['alert'] = array(
    'no_preview' => true,
    'params' => array(
        'style' => array(
            'type' => 'select',
            'label' => __('Alert Style', 'framework'),
            'desc' => __('Select the alert\'s style, ie the alert colour', 'framework'),
            'options' => array(
                'white' => 'White',
                'grey' => 'Grey',
                'red' => 'Red',
                'yellow' => 'Yellow',
                'green' => 'Green'
            )
        ),
        'content' => array(
            'std' => 'Your Alert!',
            'type' => 'textarea',
            'label' => __('Alert Text', 'framework'),
            'desc' => __('Add the alert\'s text', 'framework'),
        )
    ),
    'shortcode' => '[visual_alert style="{{style}}"] {{content}} [/visual_alert]',
    'popup_title' => __('Insert Alert Shortcode', 'framework')
);

/*-----------------------------------------------------------------------------------*/
/*
/* Toggle Config
/*
/*-----------------------------------------------------------------------------------*/

// [visual_toggle] — a collapsible content panel; 'state' sets whether it
// starts open or closed on page load.
$visual_shortcodes['toggle'] = array(
    'no_preview' => true,
    'params' => array(
        'title' => array(
            'type' => 'text',
            'label' => __('Toggle Content Title', 'framework'),
            'desc' => __('Add the title that will go above the toggle content', 'framework'),
            'std' => 'Title'
        ),
        'content' => array(
            'std' => 'Content',
            'type' => 'textarea',
            'label' => __('Toggle Content', 'framework'),
            'desc' => __('Add the toggle content. Will accept HTML', 'framework'),
        ),
        'state' => array(
            'type' => 'select',
            'label' => __('Toggle State', 'framework'),
            'desc' => __('Select the state of the toggle on page load', 'framework'),
            'options' => array(
                'open' => 'Open',
                'closed' => 'Closed'
            )
        ),
    ),
    'shortcode' => '[visual_toggle title="{{title}}" state="{{state}}"] {{content}} [/visual_toggle]',
    'popup_title' => __('Insert Toggle Content Shortcode', 'framework')
);

/*-----------------------------------------------------------------------------------*/
/*
/* Tabs Config
/*
/*-----------------------------------------------------------------------------------*/

// [visual_tabs] — tabbed content. The 'child_shortcode' block is clonable:
// each clone produces one [visual_tab] inside the wrapper shortcode.
$visual_shortcodes['tabs'] = array(
    'params' => array(),
    'no_preview' => true,
    'shortcode' => '[visual_tabs] {{child_shortcode}} [/visual_tabs]',
    'popup_title' => __('Insert Tab Shortcode', 'framework'),
    'child_shortcode' => array(
        'params' => array(
            'title' => array(
                'std' => 'Title',
                'type' => 'text',
                'label' => __('Tab Title', 'framework'),
                'desc' => __('Title of the tab', 'framework'),
            ),
            'content' => array(
                'std' => 'Tab Content',
                'type' => 'textarea',
                'label' => __('Tab Content', 'framework'),
                'desc' => __('Add the tabs content', 'framework')
            )
        ),
        'shortcode' => '[visual_tab title="{{title}}"] {{content}} [/visual_tab]',
        'clone_button' => __('Add Tab', 'framework')
    )
);

/*-----------------------------------------------------------------------------------*/
/*
/* Columns Config
/*
/*-----------------------------------------------------------------------------------*/

// Column layout shortcodes — there is no wrapper shortcode here, so the
// top-level template is just the cloned children joined together.
$visual_shortcodes['columns'] = array(
    'params' => array(),
    'shortcode' => ' {{child_shortcode}} ', // as there is no wrapper shortcode
    'popup_title' => __('Insert Columns Shortcode', 'framework'),
    'no_preview' => true,
    // child shortcode is clonable & sortable
    'child_shortcode' => array(
        'params' => array(
            'column' => array(
                'type' => 'select',
                'label' => __('Column Type', 'framework'),
                'desc' => __('Select the type, ie width of the column.', 'framework'),
                'options' => array(
                    'visual_one_third' => 'One Third',
                    'visual_one_third_last' => 'One Third Last',
                    'visual_two_third' => 'Two Thirds',
                    'visual_two_third_last' => 'Two Thirds Last',
                    'visual_one_half' => 'One Half',
                    'visual_one_half_last' => 'One Half Last',
                    'visual_one_fourth' => 'One Fourth',
                    'visual_one_fourth_last' => 'One Fourth Last',
                    'visual_three_fourth' => 'Three Fourth',
                    'visual_three_fourth_last' => 'Three Fourth Last'
                )
            ),
            'content' => array(
                'std' => '',
                'type' => 'textarea',
                'label' => __('Column Content', 'framework'),
                'desc' => __('Add the column content.', 'framework'),
            )
        ),
        'shortcode' => '[{{column}}] {{content}} [/{{column}}] ',
        'clone_button' => __('Add Column', 'framework')
    )
);
?><file_sep><?php
/*
Plugin Name: Visualkicks - Core
Plugin URI: http://www.visualkicks.com
Description: Adds core features like favicons, tracking code & update notifications via the 'Theme Settings' page
Version: 1.3
Author: Visualkicks
Author URI: http://www.visualkicks.com
Changelog
09/12/2013 - Version 1.3
- The update alerts are now site wide and dissmissable and will return 7 days later
- Update alerts will only display to super admins
09/12/2013 - Version 1.2
- removed the size attribute from header link icons (html5 invalid)
/*-----------------------------------------------------------------------------------*/
if ( ! class_exists( 'Visualkicks_Core' ) ) :
/**
 * Core theme-companion plugin: registers the "Theme Settings" admin page
 * (favicons + analytics tracking code), outputs those settings on the front
 * end, and shows a dismissable theme-update notice to super admins based on
 * a remote XML version feed.
 */
class Visualkicks_Core {

    /**
     * Wire all hooks on construction.
     */
    function __construct() {
        // activation
        register_activation_hook( __FILE__, array( &$this, 'plugin_activation' ) );
        // register settings
        add_action( 'admin_init', array( &$this, 'vk_core_settings' ) );
        // register page
        add_action( 'admin_menu', array( &$this, 'vk_add_core_page' ) );
        // scripts & styles — only load the uploader assets on our own page
        if (isset($_GET['page']) && $_GET['page'] == 'settings') { add_action('admin_print_scripts', array( &$this, 'vk_print_core_scripts' ) ); }
        // output
        add_action('wp_head', array( &$this, 'vk_favicon') );
        add_action('wp_footer', array( &$this, 'vk_analytics') );
        // update notice
        add_action('admin_notices', array( &$this, 'notice_show') );
        add_action('admin_init', array( &$this, 'notice_hide') );
    }

    /*--------------------------------------------------------------------*/
    /*
    /* Functions
    /*
    /*--------------------------------------------------------------------*/

    // activation: flush rewrite rules once when the plugin is activated
    function plugin_activation() {
        flush_rewrite_rules();
    }

    // register settings
    // NOTE(review): no sanitize callback is passed here even though
    // options_validate() exists below — confirm whether that was intended.
    function vk_core_settings(){
        register_setting( 'theme_settings', 'theme_settings' );
    }

    // register page (top-level "Theme Settings" menu entry)
    function vk_add_core_page() {
        add_menu_page( __( 'Theme Settings' ), __( 'Theme Settings' ), 'manage_options', 'settings', array( &$this, 'vk_core_page' ) );
    }

    // scripts: media uploader + the plugin's upload-field helper JS
    function vk_print_core_scripts() {
        wp_enqueue_media();
        wp_register_script('vk_upload', plugins_url( '/vk_upload.js', __FILE__ ), array('jquery','media-upload','thickbox') );
        wp_enqueue_script('vk_upload');
    }

    /**
     * favicons output (wp_head): prints the favicon and three apple-touch
     * icon links, falling back to the images bundled with the plugin when
     * no custom image has been saved.
     */
    function vk_favicon() {
        // get the images
        $options = get_option( 'theme_settings' );
        $favicon = $options['favicon'];
        $tablet1 = $options['tablet1'];
        $tablet2 = $options['tablet2'];
        $tablet3 = $options['tablet3'];
        // favicon
        if ( $favicon != '') {
            echo '<link rel="shortcut icon" href="'. $favicon .'?v=2"/>'."\n";
        } else { ?>
        <link rel="shortcut icon" href="<?php echo plugins_url(); ?>/visualkicks-core/favicon.ico?v=2" />
        <?php }
        // tablet 3
        if ( $tablet3 != '') {
            echo '<link rel="apple-touch-icon" href="'. $tablet3 .'?v=2"/>'."\n";
        } else { ?>
        <link rel="apple-touch-icon" href="<?php echo plugins_url(); ?>/visualkicks-core/tablet3.png?v=2" />
        <?php }
        // tablet 2
        if ( $tablet2 != '') {
            echo '<link rel="apple-touch-icon" href="'. $tablet2 .'?v=2"/>'."\n";
        } else { ?>
        <link rel="apple-touch-icon" href="<?php echo plugins_url(); ?>/visualkicks-core/tablet2.png?v=2" />
        <?php }
        // tablet 1
        if ( $tablet1 != '') {
            echo '<link rel="apple-touch-icon" href="'. $tablet1 .'?v=2"/>'."\n";
        } else { ?>
        <link rel="apple-touch-icon" href="<?php echo plugins_url(); ?>/visualkicks-core/tablet1.png?v=2" />
        <?php }
    }

    // analytics output (wp_footer): echoes the raw tracking snippet saved
    // on the settings page
    function vk_analytics(){
        // get the tracking code
        $options = get_option( 'theme_settings' );
        $output = $options['tracking'];
        // echo the tracking code
        if ( $output <> "" ) echo stripslashes($output) . "\n";
    }

    /**
     * update notice (admin_notices): compares the active theme version to
     * the remote feed and, for super admins only, shows a dismissable
     * "update available" notice at most once per interval.
     */
    function notice_show() {
        // check current against new versions
        $current = wp_get_theme()->Version;
        $new = $this->vk_xml_updates();
        // if a new version is available
        if($new!='') {
            if(version_compare( $current, $new) == -1 ) {
                // the user
                global $current_user;
                $user_id = $current_user->ID;
                // if user is super admin only
                if( is_super_admin($user_id) ) {
                    // show notice once a day
                    // NOTE(review): header comment says the notice returns after
                    // 7 days, but 86400 seconds is one day — confirm which is intended.
                    $interval = 86400;
                    // get the time the user last hid the notice
                    $last = get_user_meta($user_id, 'notice_time', true);
                    // get the time right now
                    $now = time();
                    // if this is the first time the notice is shown
                    // OR if the time between now and last is larger then the interval
                    if ( $last=='' || (( $now - $last ) > $interval) ) {
                        echo '<div class="updated">';
                        echo '<p><strong>Theme Update Available</strong></p>';
                        echo '<p>Please download the latest version from themeforest.net and keep up to date. Change logs can be found on the bottom of the item profile page.</p>';
                        echo '<p>';
                        printf(__('<a class="button" href="%1$s">Remind Me Later</a>'), '?notice_hide=0');
                        echo '</p></div>';
                    }
                }
            }
        }
    }

    // close notice: record the dismiss time when "Remind Me Later" is clicked
    function notice_hide() {
        // user id
        global $current_user;
        $user_id = $current_user->ID;
        // update the notice time when the user clicks hide
        if ( isset($_GET['notice_hide']) && '0' == $_GET['notice_hide'] ) {
            update_user_meta($user_id, 'notice_time', time() );
        }
    }

    // get xml data: fetch and parse the remote update feed; false on failure
    function vk_xml_data() {
        $xmlurl = 'http://www.visualkicks.com/xml/update.xml';
        $content = wp_remote_get( $xmlurl );
        $body = wp_remote_retrieve_body($content);
        if($body!='') {
            $xml = simplexml_load_string($body);
            return $xml;
        } else {
            return false;
        }
    }

    // check xml theme version: latest version string for the active theme
    // (looked up by lowercased theme name), or false when the feed failed
    function vk_xml_updates() {
        $xml = $this->vk_xml_data();
        if($xml!='') {
            $themename = strtolower(wp_get_theme());
            return $xml->updates->$themename;
        } else {
            return false;
        }
    }

    /*--------------------------------------------------------------------*/
    /*
    /* Page Markup
    /*
    /*--------------------------------------------------------------------*/

    /**
     * Renders the "Theme Settings" admin page: tracking-code textarea plus
     * four image-upload fields (favicon and three apple-touch icon sizes),
     * all stored in the single 'theme_settings' option array.
     */
    function vk_core_page() {
        if ( ! isset( $_REQUEST['settings-updated'] ) ) {
            $_REQUEST['settings-updated'] = false;
        } ?>
        <!-- page tart -->
        <div class="wrap">
        <!-- icon -->
        <div id="icon-generic" class="icon32"><br></div>
        <!-- title -->
        <h2><?php _e( 'Theme Settings' ); ?></h2>
        <!-- saved message -->
        <?php if ( false !== $_REQUEST['settings-updated'] ) { ?>
        <div id="setting-error-settings_updated" class="updated settings-error">
        <p><strong>Settings saved.</strong></p></div>
        <?php } ?>
        <!-- form start -->
        <form method="post" action="options.php">
        <?php
        // setting fields
        settings_fields( 'theme_settings' );
        // get the options
        $options = get_option( 'theme_settings' );
        // is set fallbacks
        if(!isset($options['tracking'])) { $options['tracking']=''; }
        if(!isset($options['favicon'])) { $options['favicon']=''; }
        if(!isset($options['tablet1'])) { $options['tablet1']=''; }
        if(!isset($options['tablet2'])) { $options['tablet2']=''; }
        if(!isset($options['tablet3'])) { $options['tablet3']=''; }
        ?>
        <table class="form-table"><tbody>
        <!-- tracking code -->
        <tr valign="top">
        <th scope="row"><label for="theme_settings[tracking]"><?php _e( 'Analytics Tracking code', 'framework' ); ?></label></th>
        <td><fieldset>
        <p><label for="theme_settings[tracking]"><?php _e('Any tracking code placed here will be inserted at the very bottom of every page.','framework'); ?></label></p>
        <p><textarea name="theme_settings[tracking]" id="theme_settings[tracking]" class="large-text code" rows="5" cols="50"><?php esc_attr_e( $options['tracking'] ); ?></textarea></p>
        </fieldset></td>
        </tr>
        <!-- favicon -->
        <tr valign="top">
        <th scope="row"><label for="theme_settings[favicon]"><?php _e( 'Favicon (32 x 32)', 'framework' ); ?></label></th>
        <td><fieldset>
        <p><input class="imgfield" id="theme_settings[favicon]" name="theme_settings[favicon]" type="text" size="36" value="<?php echo $options['favicon']; ?>"/>
        <input class="button upload_button" type="button" value="Upload Image"/></p>
        <p><img src="<?php echo $options['favicon']; ?>" style="max-height: 200px;"></p>
        </fieldset></td>
        </tr>
        <!-- tablet favicon 1 -->
        <tr valign="top">
        <th scope="row"><label for="theme_settings[tablet1]"><?php _e( 'Tablet Favicon 1 (72 x 72)', 'framework' ); ?></label></th>
        <td><fieldset>
        <p><input class="imgfield" id="theme_settings[tablet1]" name="theme_settings[tablet1]" type="text" size="36" value="<?php echo $options['tablet1']; ?>"/>
        <input class="button upload_button" type="button" value="Upload Image"/></p>
        <p><img src="<?php echo $options['tablet1']; ?>" style="max-height: 200px;"></p>
        </fieldset></td>
        </tr>
        <!-- tablet favicon 2 -->
        <tr valign="top">
        <th scope="row"><label for="theme_settings[tablet2]"><?php _e( 'Tablet Favicon 2 (114 x 114)', 'framework' ); ?></label></th>
        <td><fieldset>
        <p><input class="imgfield" id="theme_settings[tablet2]" name="theme_settings[tablet2]" type="text" size="36" value="<?php echo $options['tablet2']; ?>"/>
        <input class="button upload_button" type="button" value="Upload Image"/></p>
        <p><img src="<?php echo $options['tablet2']; ?>" style="max-height: 200px;"></p>
        </fieldset></td>
        </tr>
        <!-- tablet favicon 3 -->
        <tr valign="top">
        <th scope="row"><label for="theme_settings[tablet3]"><?php _e( 'Tablet Favicon 3 (144 x 144)', 'framework' ); ?></label></th>
        <td><fieldset>
        <p><input class="imgfield" id="theme_settings[tablet3]" name="theme_settings[tablet3]" type="text" size="36" value="<?php echo $options['tablet3']; ?>"/>
        <input class="button upload_button" type="button" value="Upload Image"/></p>
        <p><img src="<?php echo $options['tablet3']; ?>" style="max-height: 200px;"></p>
        </fieldset></td>
        </tr>
        </tbody></table>
        <!-- submit -->
        <p class="submit"><input type="submit" name="submit" id="submit" class="button button-primary" value="Save Changes"></p>
        </form>
        </div><!-- end .wrap -->
        <?php
    }

    // sanitize and validate the tracking-code field
    // NOTE(review): not currently registered as a sanitize callback (see
    // vk_core_settings above).
    function options_validate( $input ) {
        $input['tracking'] = wp_filter_post_kses( $input['tracking'] );
        return $input;
    }
}

new Visualkicks_Core;
endif;
?><file_sep><?php
namespace MavenAlgolia\Core;
use MavenAlgolia\Core\Registry;
class Initializer {

    // Lazily-created singleton used as the target for the enqueue hook.
    private static $instance;

    /**
     * Entry point: create the singleton on first call and hook front-end
     * script/style registration into `wp_enqueue_scripts`.
     */
    // Register the algolia search js
    public static function init(){
        if ( ! self::$instance ){
            self::$instance = new self();
        }
        add_action( 'wp_enqueue_scripts', array( self::$instance,'registerScripts' ) );
    }

    /**
     * Registers the bundled Algolia client library and, on the public site
     * when the integration is enabled, enqueues the autocomplete
     * ("predictions") styles/scripts and localizes the settings they read.
     */
    public function registerScripts(){
        wp_register_script('mvnAlgoliaSearch', Registry::instance()->getPluginUrl() . 'lib/algoliasearch.min.js', array('jquery'), Registry::instance()->getPluginVersion() );
        if( ! is_admin() && Registry::instance()->isEnabled() ){
            // Front css styles
            wp_register_style( 'mvnAlgoliaPrediction', Registry::instance()->getPluginUrl() . 'front/assets/styles/predictions.css', array(), Registry::instance()->getPluginVersion() );
            wp_enqueue_style( 'mvnAlgoliaPrediction' );
            $adminUrl = admin_url();
            $homeUrl = set_url_scheme( home_url() );
            // Front js script — settings exposed to the JS as `mvnAlgSettings`
            $settings = array(
                'siteUrl' => $homeUrl,
                'ajaxUrl' => $adminUrl . "admin-ajax.php",
                'appId' => Registry::instance()->getAppId(),
                'apiKeySearch' => Registry::instance()->getApiKeySearch(),
                'indexName' => Registry::instance()->getDefaultIndex(),
                'showExcerpt' => 0, // Should be an Integer 0 | 1
                'indexTaxonomies' => (int)Registry::instance()->indexTaxonomies(), // Should be an Integer 0 | 1
                'taxonomiesToIndex' => (Registry::instance()->indexTaxonomies()) ? FieldsHelper::getTaxonomiesToIndex() : array(),
                'labels' => array( 'taxonomies' => FieldsHelper::getTaxonomyLabels(), 'posts' => __('Posts') )
            );
            // NOTE(review): wp_localize_script() takes three arguments; the
            // trailing version argument on both calls below is ignored.
            wp_localize_script( 'mvnAlgoliaSearch', 'mvnAlgSettings', $settings, Registry::instance()->getPluginVersion() );
            wp_register_script( 'mvnAlgoliaPrediction', Registry::instance()->getPluginUrl() . 'front/assets/scripts/predictions.js', array( 'jquery', 'jquery-ui-autocomplete', 'mvnAlgoliaSearch' ), Registry::instance()->getPluginVersion() );
            wp_enqueue_script( 'mvnAlgoliaPrediction' );
            // Variables for the prediction widget, exposed as `mvnAlgSearchVars`
            $vars = array(
                'inputSearchName' => 's',
                'containerId' => 'mvn-alg-predictions',
                'postsPerPage' => 5,
                // 'labels' => array(
                // ),
            );
            wp_localize_script( 'mvnAlgoliaPrediction', 'mvnAlgSearchVars', $vars, Registry::instance()->getPluginVersion() );
        }
    }
}<file_sep><?php
/*-----------------------------------------------------------------------------------*/
/*
/* Column Shortcodes
/*
/*-----------------------------------------------------------------------------------*/
// Column layout shortcodes. Each returns its content wrapped in a width
// class; the "_last" variants add the row-closing class and a clearing div.
// Every callback sets the $add_shortcode_scripts global so the plugin's
// wp_footer hook knows to print the shortcode CSS/JS on this page.
if (!function_exists('visual_one_third')) {
    function visual_one_third( $atts, $content = null ) {
        // add script global
        global $add_shortcode_scripts;
        $add_shortcode_scripts = true;
        return '<div class="visual-one-third">' . do_shortcode($content) . '</div>';
    }
    add_shortcode('visual_one_third', 'visual_one_third');
}
if (!function_exists('visual_one_third_last')) {
    function visual_one_third_last( $atts, $content = null ) {
        // add script global
        global $add_shortcode_scripts;
        $add_shortcode_scripts = true;
        return '<div class="visual-one-third visual-column-last">' . do_shortcode($content) . '</div><div class="clear"></div>';
    }
    add_shortcode('visual_one_third_last', 'visual_one_third_last');
}
if (!function_exists('visual_two_third')) {
    function visual_two_third( $atts, $content = null ) {
        // add script global
        global $add_shortcode_scripts;
        $add_shortcode_scripts = true;
        return '<div class="visual-two-third">' . do_shortcode($content) . '</div>';
    }
    add_shortcode('visual_two_third', 'visual_two_third');
}
if (!function_exists('visual_two_third_last')) {
    function visual_two_third_last( $atts, $content = null ) {
        // add script global
        global $add_shortcode_scripts;
        $add_shortcode_scripts = true;
        return '<div class="visual-two-third visual-column-last">' . do_shortcode($content) . '</div><div class="clear"></div>';
    }
    add_shortcode('visual_two_third_last', 'visual_two_third_last');
}
if (!function_exists('visual_one_half')) {
    function visual_one_half( $atts, $content = null ) {
        // add script global
        global $add_shortcode_scripts;
        $add_shortcode_scripts = true;
        return '<div class="visual-one-half">' . do_shortcode($content) . '</div>';
    }
    add_shortcode('visual_one_half', 'visual_one_half');
}
if (!function_exists('visual_one_half_last')) {
    function visual_one_half_last( $atts, $content = null ) {
        // add script global
        global $add_shortcode_scripts;
        $add_shortcode_scripts = true;
        return '<div class="visual-one-half visual-column-last">' . do_shortcode($content) . '</div><div class="clear"></div>';
    }
    add_shortcode('visual_one_half_last', 'visual_one_half_last');
}
if (!function_exists('visual_one_fourth')) {
    function visual_one_fourth( $atts, $content = null ) {
        // add script global
        global $add_shortcode_scripts;
        $add_shortcode_scripts = true;
        return '<div class="visual-one-fourth">' . do_shortcode($content) . '</div>';
    }
    add_shortcode('visual_one_fourth', 'visual_one_fourth');
}
if (!function_exists('visual_one_fourth_last')) {
    function visual_one_fourth_last( $atts, $content = null ) {
        // add script global
        global $add_shortcode_scripts;
        $add_shortcode_scripts = true;
        return '<div class="visual-one-fourth visual-column-last">' . do_shortcode($content) . '</div><div class="clear"></div>';
    }
    add_shortcode('visual_one_fourth_last', 'visual_one_fourth_last');
}
if (!function_exists('visual_three_fourth')) {
    function visual_three_fourth( $atts, $content = null ) {
        // add script global
        global $add_shortcode_scripts;
        $add_shortcode_scripts = true;
        return '<div class="visual-three-fourth">' . do_shortcode($content) . '</div>';
    }
    add_shortcode('visual_three_fourth', 'visual_three_fourth');
}
if (!function_exists('visual_three_fourth_last')) {
    function visual_three_fourth_last( $atts, $content = null ) {
        // add script global
        global $add_shortcode_scripts;
        $add_shortcode_scripts = true;
        return '<div class="visual-three-fourth visual-column-last">' . do_shortcode($content) . '</div><div class="clear"></div>';
    }
    add_shortcode('visual_three_fourth_last', 'visual_three_fourth_last');
}
/*-----------------------------------------------------------------------------------*/
/*
/* Buttons
/*
/*-----------------------------------------------------------------------------------*/
if (!function_exists('visual_button')) {
    // [visual_button] — renders an <a> styled as a themed button.
    // Supported attributes: url, target, style (colour) and size.
    function visual_button( $atts, $content = null ) {
        // Flag the footer hook to print the shortcode assets on this page.
        global $add_shortcode_scripts;
        $add_shortcode_scripts = true;

        // Merge user attributes over the defaults (no extract(), access
        // the merged array explicitly instead).
        $a = shortcode_atts(array(
            'url'    => '#',
            'target' => '_self',
            'style'  => 'default',
            'size'   => 'small',
        ), $atts);

        return '<a target="' . $a['target'] . '" class="button visual-button ' . $a['size'] . ' ' . $a['style'] . '" href="' . $a['url'] . '">' . do_shortcode($content) . '</a>';
    }
    add_shortcode('visual_button', 'visual_button');
}
/*-----------------------------------------------------------------------------------*/
/*
/* Code
/*
/*-----------------------------------------------------------------------------------*/
if (!function_exists('code')) {
    // [code] — wraps its content in a <code> element, unchanged.
    function code( $atts, $content = null ) {
        // Flag the footer hook to print the shortcode assets on this page.
        global $add_shortcode_scripts;
        $add_shortcode_scripts = true;

        // No attributes are defined for this shortcode.
        shortcode_atts( array(), $atts );

        return '<code>' . $content . '</code>';
    }
    add_shortcode('code', 'code');
}
/*-----------------------------------------------------------------------------------*/
/*
/* Alerts
/*
/*-----------------------------------------------------------------------------------*/
if (!function_exists('visual_alert')) {
    // [visual_alert] — coloured notice box; 'style' picks the colour class.
    function visual_alert( $atts, $content = null ) {
        // Flag the footer hook to print the shortcode assets on this page.
        global $add_shortcode_scripts;
        $add_shortcode_scripts = true;

        $a = shortcode_atts(array('style' => 'white'), $atts);

        return '<div class="visual-alert ' . $a['style'] . '">' . $content . '</div>';
    }
    add_shortcode('visual_alert', 'visual_alert');
}
/*-----------------------------------------------------------------------------------*/
/*
/* Toggle Shortcodes
/*
/*-----------------------------------------------------------------------------------*/
// [visual_toggle] — collapsible panel. 'state' ("open"/"closed") is emitted
// as a data-id attribute the front-end JS reads to set the initial state.
if (!function_exists('visual_toggle')) {
    function visual_toggle( $atts, $content = null ) {
        // add script global — tells the footer hook to print shortcode assets
        global $add_shortcode_scripts;
        $add_shortcode_scripts = true;
        extract(shortcode_atts(array(
            'title' => 'Title goes here',
            'state' => 'open'
        ), $atts));
        return "<div data-id='".$state."' class=\"visual-toggle\"><span class=\"visual-toggle-title\"><span class=\"title-icon\"></span>". $title ."</span><div class=\"visual-toggle-inner\">". do_shortcode($content) ."</div></div>";
    }
    add_shortcode('visual_toggle', 'visual_toggle');
}
/*-----------------------------------------------------------------------------------*/
/*
/* Tabs Shortcodes
/*
/*-----------------------------------------------------------------------------------*/
// [visual_tabs] — tab container. Scrapes the child [visual_tab title="..."]
// attributes out of the raw content with a regex to build the nav list, then
// lets do_shortcode() render the tab panels themselves.
if (!function_exists('visual_tabs')) {
    function visual_tabs( $atts, $content = null ) {
        // add script global — tells the footer hook to print shortcode assets
        global $add_shortcode_scripts;
        $add_shortcode_scripts = true;
        $defaults = array();
        extract( shortcode_atts( $defaults, $atts ) );
        // Per-request counter so each tab group gets a unique container id.
        STATIC $i = 0;
        $i++;
        // Extract the tab titles for use in the tab widget.
        preg_match_all( '/tab title="([^\"]+)"/i', $content, $matches, PREG_OFFSET_CAPTURE );
        $tab_titles = array();
        if( isset($matches[1]) ){ $tab_titles = $matches[1]; }
        $output = '';
        if( count($tab_titles) ){
            // Nav anchors point at the sanitized-title ids emitted by visual_tab().
            $output .= '<div id="visual-tabs-'. $i .'" class="visual-tabs"><div class="visual-tab-inner">';
            $output .= '<ul class="visual-nav visual-clearfix">';
            foreach( $tab_titles as $tab ){
                $output .= '<li><a href="#visual-tab-'. sanitize_title( $tab[0] ) .'">' . $tab[0] . '</a></li>';
            }
            $output .= '</ul>';
            $output .= do_shortcode( $content );
            $output .= '</div></div>';
        } else {
            // No recognizable tabs: just render the content as-is.
            $output .= do_shortcode( $content );
        }
        return $output;
    }
    add_shortcode( 'visual_tabs', 'visual_tabs' );
}
if (!function_exists('visual_tab')) {
    // [visual_tab] — single tab panel; its id must match the anchor that
    // visual_tabs() builds from the same sanitized title.
    function visual_tab( $atts, $content = null ) {
        // Flag the footer hook to print the shortcode assets on this page.
        global $add_shortcode_scripts;
        $add_shortcode_scripts = true;

        $a = shortcode_atts(array( 'title' => 'Tab' ), $atts);

        return '<div id="visual-tab-' . sanitize_title( $a['title'] ) . '" class="visual-tab">' . do_shortcode( $content ) . '</div>';
    }
    add_shortcode( 'visual_tab', 'visual_tab' );
}
?><file_sep> <div class="clear"></div>
</div><!-- end .rightContent -->
<script type="text/javascript">
(function() {
window._pa = window._pa || {};
// _pa.orderId = "myOrderId"; // OPTIONAL: attach unique conversion identifier to conversions
// _pa.revenue = "19.99"; // OPTIONAL: attach dynamic purchase values to conversions
// _pa.productId = "myProductId"; // OPTIONAL: Include product ID for use with dynamic ads
var pa = document.createElement('script'); pa.type = 'text/javascript'; pa.async = true;
pa.src = ('https:' == document.location.protocol ? 'https:' : 'http:') + "//tag.perfectaudience.com/serve/53ab5d8e36531ac20e00006f.js";
var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(pa, s);
})();
</script>
<?php // copyright — only rendered when the 'vk_copy_text' option is set
$copy_text=get_option('vk_copy_text');
if($copy_text!='') { ?>
<div class="clear"></div>
<div class="copyright"><p style="font-size: 90%;"><?php echo $copy_text; ?></p></div>
<?php } ?>
<div class="clear"></div>
</div><!-- end rightPadding -->
<!-- iOS background helper -->
<div class="iosBackground"></div>
</div><!-- end rightContainer -->
<span id="directoryRef" data-directory="<?php echo get_template_directory_uri(); ?>" data-loading="<?php _e('LOADING','framework'); ?>"></span>
</div><!-- end #globalWrap -->
<?php wp_footer(); ?>
</body>
</html>
<?php
// Naive HTML minifier: grabs the whole buffered page and collapses it.
// NOTE(review): assumes ob_start() was called earlier (presumably in
// header.php) — confirm, otherwise ob_get_contents() returns false.
// create the output string
$output = ob_get_contents();
// end object
ob_end_clean();
// remove javascript comments
// NOTE(review): this pattern can also eat '//' inside URLs/strings that
// follow whitespace — the minifier is best-effort, not a real parser.
$output = preg_replace('/(?<!\S)\/\/\s*[^\r\n]*/', '', $output);
// remove whitespace
$output = join("\n", array_map("trim", explode("\n", $output)));
// remove tab spaces (the pattern contains a literal tab character)
$output = preg_replace('/	/', '', $output);
// remove double spaces (create single space)
$output = preg_replace('/  /', ' ', $output);
// remove empty lines
$output = preg_replace('/\n+/', " ", trim($output));
// compressed
echo $output;
?><file_sep><?php
/*
Plugin Name: Visualkicks - Shortcodes
Plugin URI: http://www.visualkicks.com/plugins/
Description: Enables a set of shortcodes usable in the visual post editor for use in our compatible Visualkicks themes
Version: 1.2
Author: Visualkicks
Author URI: http://www.visualkicks.com
Changelog
04/11/2013 Version 1.2
- added forced margin right to the buttons
- removed the eschtmltags from code shortcode
- all styles are now forced and themes should no longer need to override styling
- instead of using .hide and .show in the js we now set the css
- we have forced the default views for tabs now via css
- scripts are now only loaded if they are needed and also placed into the footer not the head
29/11/2013 Version 1.1
- buttons no longer container any styling for sizing
- buttons have their colors set with !important rule
- added the .button class to the visual buttons
/*-----------------------------------------------------------------------------------*/
if ( ! class_exists( 'Visualkicks_Shortcodes' ) ) :
class Visualkicks_Shortcodes {
	/**
	 * Loads the shortcode callbacks, defines the TinyMCE plugin paths and
	 * wires the init / admin / footer hooks.
	 */
	function __construct() {
		// get shortcodes.php — registers the actual shortcode callbacks
		require_once( plugin_dir_path( __FILE__ ) .'shortcodes.php' );
		// define paths to the bundled TinyMCE plugin assets
		define('VISUAL_TINYMCE_URI', plugin_dir_url( __FILE__ ) .'tinymce');
		define('VISUAL_TINYMCE_DIR', plugin_dir_path( __FILE__ ) .'tinymce');
		// register
		add_action('init', array(&$this, 'init'));
		add_action('admin_init', array(&$this, 'admin_init'));
		add_action('wp_footer', array(&$this, 'print_shortcode_scripts'));
	}
/*--------------------------------------------------------------------*/
/*
/* Registers TinyMCE
/*
/*--------------------------------------------------------------------*/
function init() {
// admin conditions
if( ! is_admin() ) {
// enqueue styles
wp_register_style( 'visual-shortcodes', plugin_dir_url( __FILE__ ) . 'shortcodes.css' );
// register scripts
wp_register_script( 'visual-shortcodes-lib', plugin_dir_url( __FILE__ ) . 'js/visual-shortcodes-lib.js', array('jquery', 'jquery-ui-tabs'), '1.0', true );
}
// exit conditions
if ( ! current_user_can('edit_posts') && ! current_user_can('edit_pages') ) { return; }
// editor condtions
if ( get_user_option('rich_editing') == 'true' ) {
add_filter( 'mce_external_plugins', array(&$this, 'add_rich_plugins') );
add_filter( 'mce_buttons', array(&$this, 'register_rich_buttons') );
}
}
/*--------------------------------------------------------------------*/
/*
/* Print shortcode scripts in footer only if needed
/*
/*--------------------------------------------------------------------*/
function print_shortcode_scripts() {
// only print if shortcode is in use
global $add_shortcode_scripts;
if ( ! $add_shortcode_scripts ) {
return;
} else {
wp_print_styles('visual-shortcodes');
wp_print_scripts('jquery-ui-tabs');
wp_print_scripts('visual-shortcodes-lib');
}
}
/*--------------------------------------------------------------------*/
/*
/* Defins TinyMCE js plugin
/*
/*--------------------------------------------------------------------*/
function add_rich_plugins( $plugin_array ) {
$plugin_array['visualShortcodes'] = VISUAL_TINYMCE_URI . '/plugin.js';
return $plugin_array;
}
/*--------------------------------------------------------------------*/
/*
/* Adds TinyMCE buttons
/*
/*--------------------------------------------------------------------*/
function register_rich_buttons( $buttons ) {
array_push( $buttons, "|", 'visual_button' );
return $buttons;
}
/*--------------------------------------------------------------------*/
/*
/* Enqueue Scripts and Styles
/*
/*--------------------------------------------------------------------*/
function admin_init() {
// css
wp_enqueue_style( 'visual-popup', VISUAL_TINYMCE_URI . '/css/popup.css', false, '1.0', 'all' );
// js
wp_enqueue_script( 'jquery-ui-sortable' );
wp_enqueue_script( 'jquery-livequery', VISUAL_TINYMCE_URI . '/js/jquery.livequery.js', false, '1.1.1', false );
wp_enqueue_script( 'jquery-appendo', VISUAL_TINYMCE_URI . '/js/jquery.appendo.js', false, '1.0', false );
wp_enqueue_script( 'base64', VISUAL_TINYMCE_URI . '/js/base64.js', false, '1.0', false );
wp_enqueue_script( 'visual-popup', VISUAL_TINYMCE_URI . '/js/popup.js', false, '1.0', false );
// localize the plugins js
wp_localize_script( 'jquery', 'VisualShortcodes', array('plugin_folder' => WP_PLUGIN_URL .'/vk-shortcodes') );
}
}
$visual_shortcodes = new Visualkicks_Shortcodes();
endif;
?><file_sep><?php
/*
Plugin Name: Visualkicks - Social Widget
Plugin URI: http://www.visualkicks.com/plugins
Description: Enables a Social Icons widget for use in our compatible Visualkicks themes
Version: 1.2
Author: Visualkicks
Author URI: http://www.visualkicks.com
Changelog
12/01/2013 - Version 1.2
- all visualkick themes come with font awesome in their framework so we have removed it from the widget
- stylesheet is only printed if widget is actually in use
29/11/2013 - Version 1.1
- icon handle is now 'fa-'. All themes should use the same handle
- removed theme check classes "non-visualkicks" as it is no longer needed
- icon font set to 'FontAwesome' with !important rule
- added a -5px bottom margin to the widget container
/*-----------------------------------------------------------------------------------*/
// hook the widget registration into WordPress
add_action( 'widgets_init', 'vk_social_widget' );
// registration callback (PHP class names are case-insensitive, so the
// capitalised handle resolves to the vk_social_widget class below)
function vk_social_widget() {
register_widget( 'VK_Social_Widget' );
}
/*-----------------------------------------------------------------------------------*/
/*
/* Register Styles
/*
/*-----------------------------------------------------------------------------------*/
/**
 * Register the widget stylesheet and print it from wp_footer, but only
 * on pages where the Social Icons widget actually rendered.
 *
 * BUG FIX: the third argument of wp_register_style() is $deps; the old
 * call passed 'all' there, registering a dependency on a nonexistent
 * 'all' handle, which prevents WordPress from ever printing the style.
 * 'all' is the $media value and belongs in the fifth argument.
 */
function vk_social_register_scripts() {
	// register up front so it can be printed on demand below
	wp_register_style('widget-social', plugins_url( '/widget-social.css', __FILE__ ), array(), false, 'all');
	// $add_social_scripts is raised by the widget when it renders
	global $add_social_scripts;
	if ( ! $add_social_scripts ) {
		return;
	}
	wp_print_styles('widget-social');
}
add_action('wp_footer', 'vk_social_register_scripts'); // add plugin styles after the theme styles
/*-----------------------------------------------------------------------------------*/
/*
/* Widget Setup
/*
/*-----------------------------------------------------------------------------------*/
// widget class
class vk_social_widget extends WP_Widget {

	/**
	 * Supported networks in display order, mapped to the FontAwesome icon
	 * class used for each (a few icon names differ from the network slug,
	 * e.g. google => fa-google-plus, vimeo => fa-vimeo-square).
	 */
	private static $networks = array(
		'android'    => 'fa-android',
		'apple'      => 'fa-apple',
		'dribbble'   => 'fa-dribbble',
		'dropbox'    => 'fa-dropbox',
		'facebook'   => 'fa-facebook',
		'flickr'     => 'fa-flickr',
		'foursquare' => 'fa-foursquare',
		'github'     => 'fa-github',
		'google'     => 'fa-google-plus',
		'instagram'  => 'fa-instagram',
		'linkedin'   => 'fa-linkedin',
		'pinterest'  => 'fa-pinterest',
		'renren'     => 'fa-renren',
		'skype'      => 'fa-skype',
		'trello'     => 'fa-trello',
		'tumblr'     => 'fa-tumblr',
		'twitter'    => 'fa-twitter',
		'vimeo'      => 'fa-vimeo-square',
		'vk'         => 'fa-vk',
		'weibo'      => 'fa-weibo',
		'windows'    => 'fa-windows',
		'xing'       => 'fa-xing',
		'youtube'    => 'fa-youtube',
		'rss'        => 'fa-rss',
	);

	/**
	 * Register the widget.
	 *
	 * BUG FIX: replaces the PHP4-style constructor ($this->WP_Widget(...)),
	 * which is deprecated since WP 4.3 and unsupported on PHP 8, with
	 * parent::__construct().
	 */
	function __construct() {
		// settings
		$widget_ops = array(
			'classname' => 'vk_social_widget',
			'description' => __('A widget that displays a set buttons that link to your social profiles.', 'framework')
		);
		// control
		$control_ops = array(
			'width' => 300,
			'height' => 350,
			'id_base' => 'vk_social_widget'
		);
		// create
		parent::__construct( 'vk_social_widget', __('Social Icons', 'framework'), $widget_ops, $control_ops );
	}

	/**
	 * Front-end output: one icon link per network with a configured URL.
	 *
	 * @param array $args     Sidebar markup (before/after widget/title).
	 * @param array $instance Saved settings: 'title' plus one URL per network.
	 */
	function widget( $args, $instance ) {
		extract( $args );
		// isset() guards avoid notices for instances saved before a
		// given network existed in the list
		$title = apply_filters( 'widget_title', isset( $instance['title'] ) ? $instance['title'] : '' );
		// raise the wp_footer flag so the widget stylesheet gets printed
		global $add_social_scripts;
		$add_social_scripts = true;
		// widget before
		echo $before_widget;
		// widget title
		if ( $title ) { echo $before_title . $title . $after_title; } ?>
		<div class="social-widget">
		<?php foreach ( self::$networks as $network => $icon ) {
			$url = isset( $instance[ $network ] ) ? $instance[ $network ] : '';
			if ( $url != '' ) {
				echo '<a href="' . $url . '" class="button icon ' . $network . ' ' . $icon . '"></a>';
			}
		} ?>
		</div>
		<?php // widget after
		echo $after_widget;
	}

	/**
	 * Sanitize and save the settings (strip_tags on every field, as before).
	 */
	function update( $new_instance, $old_instance ) {
		$instance = $old_instance;
		$instance['title'] = strip_tags( $new_instance['title'] );
		foreach ( array_keys( self::$networks ) as $network ) {
			$instance[ $network ] = isset( $new_instance[ $network ] ) ? strip_tags( $new_instance[ $network ] ) : '';
		}
		return $instance;
	}

	/**
	 * Admin form: a title field plus one URL field per supported network.
	 */
	function form( $instance ) {
		// defaults: empty URL for every supported network
		$defaults = array( 'title' => 'Social Icons' );
		foreach ( array_keys( self::$networks ) as $network ) {
			$defaults[ $network ] = '';
		}
		$instance = wp_parse_args( (array) $instance, $defaults ); ?>
		<p>
			<label for="<?php echo $this->get_field_id( 'title' ); ?>"><?php _e('Title:', 'framework') ?></label>
			<input type="text" class="widefat" id="<?php echo $this->get_field_id( 'title' ); ?>" name="<?php echo $this->get_field_name( 'title' ); ?>" value="<?php echo $instance['title']; ?>" />
		</p>
		<?php // network slugs are proper nouns, so no translation call needed
		foreach ( array_keys( self::$networks ) as $network ) { ?>
		<p>
			<label for="<?php echo $this->get_field_id( $network ); ?>"><?php echo $network; ?></label>
			<input type="text" class="widefat" id="<?php echo $this->get_field_id( $network ); ?>" name="<?php echo $this->get_field_name( $network ); ?>" value="<?php echo $instance[ $network ]; ?>" />
		</p>
		<?php }
	}
}
?><file_sep>jQuery(document).ready(function($) { // start jquery scripts (Don't delete this)
var custom_uploader;
$('.upload_button').click(function(e) {
e.preventDefault();
// button
thisbutton = $(this);
// textfield
textfield = thisbutton.siblings('input');
// thumbnail
thumbnail = thisbutton.parent().parent().find('img');
//If the uploader object has already been created, reopen the dialog
if (custom_uploader) {
custom_uploader.open();
return;
}
//Extend the wp.media object
custom_uploader = wp.media.frames.file_frame = wp.media({
title: 'Select Favicon Image',
button: {
text: 'Select Image'
},
multiple: false
});
//When a file is selected, grab the URL and set it as the text field's value
custom_uploader.on('select', function() {
attachment = custom_uploader.state().get('selection').first().toJSON();
textfield.val(attachment.url);
thumbnail.attr('src',attachment.url);
});
//Open the uploader dialog
custom_uploader.open();
});
}); // end jquery scripts (Don't delete this)
<file_sep>var mvnAlgoliaPrediction = (function($) {
var algolia;
var that;
var algoliaQueries = [];
var self = {
initialize: function() {
that = this;
if (typeof AlgoliaSearch !== "undefined") {
this.initAlgolia();
}
},
initAlgolia: function() {
algolia = new AlgoliaSearch( mvnAlgSettings.appId, mvnAlgSettings.apiKeySearch ); // public credentials
},
indexTaxonomies: function() {
if( typeof mvnAlgSettings.indexTaxonomies !== 'undefined'
&& parseInt( mvnAlgSettings.indexTaxonomies ) === 1
&& typeof mvnAlgSettings.taxonomiesToIndex !== 'undefined'
&& mvnAlgSettings.taxonomiesToIndex ){
return true;
}
return false;
},
searchCallback: function(success, content, response) {
var data = [];
var resultIndex = jQuery.inArray( 'posts', algoliaQueries );
if ( success && content.results.length > 0 && that.lastQuery === content.results[resultIndex].query ) { // do not consider outdated answers
var posts = content.results[resultIndex];
if( posts.hits.length > 0 ){
for (var i = 0; i < posts.hits.length; ++i) {
var hit = posts.hits[i];
var hitInfo = {
label: hit.title,
value: hit.title,
title: (hit._highlightResult.title && hit._highlightResult.title.value) || hit.title,
permalink: hit.permalink,
categories: hit.category,
tags: hit._tags,
excerpt: (hit._highlightResult.excerpt && hit._highlightResult.excerpt.value) || hit.excerpt,
description: (hit._highlightResult.content && hit._highlightResult.content.value) || hit.content,
date: hit.date,
featuredImage: hit.featuredImage,
category: (mvnAlgSettings.labels.posts) ? mvnAlgSettings.labels.posts : '' // Posts LABEL
};
data.push( hitInfo );
}
}
}
if( self.indexTaxonomies() ){
jQuery.each(mvnAlgSettings.taxonomiesToIndex, function(index, element){
resultIndex = jQuery.inArray( index, algoliaQueries );
var terms = content.results[resultIndex];
if( terms.hits.length > 0 ){
for (var i = 0; i < terms.hits.length; ++i) {
var hit = terms.hits[i];
var hitInfo = {
label: hit.title,
value: hit.objectID,
title: (hit._highlightResult.title && hit._highlightResult.title.value) || hit.title,
permalink: hit.permalink,
featuredImage: hit.image,
termId: hit.termId,
parent: hit.parent,
postsRelated: hit.postsRelated,
taxonomy: hit.taxonomy,
category: (mvnAlgSettings.labels.taxonomies[index]) ? mvnAlgSettings.labels.taxonomies[index] : '' // TAXONOMY LABEL
};
data.push( hitInfo );
}
}
});
}
response(data);
},
getDisplayPost: function( hit ) {
var htmlPost = '';
htmlPost += ' <a href="' + hit.permalink + '" class="mvn-alg-ls-item-title">';
if( typeof hit.featuredImage !== 'undefined' && hit.featuredImage ){
html += ' <img src="'+hit.featuredImage.file+'" width="40" height="60" />';
}
htmlPost += ' <strong>' + hit.title + '</strong>';
if( typeof hit.categories !== 'undefined' ){
htmlPost += ' <br /><span class="mvn-alg-ls-item-cats">' + hit.categories.join() + '</span>';
}
if( mvnAlgSettings.showExcerpt && typeof hit.excerpt !== 'undefined' && hit.excerpt ){
htmlPost += ' <br /><span class="mvn-alg-ls-item-desc">' + hit.excerpt + '</span>';
}
htmlPost += ' </a>';
return htmlPost;
},
getDisplayTerm: function( hit ) {
var html = '';
html += '<a href="' + hit.permalink + '" class="mvn-alg-ls-item-title">';
if( typeof hit.featuredImage !== 'undefined' && hit.featuredImage ){
html += ' <img src="'+hit.featuredImage+'" width="40" height="60" />';
}
html += ' <strong>' + hit.title + '</strong>';
html += '</a>';
return html;
},
search: function( request, response ) {
if( typeof algolia !== 'undefined' ){
algoliaQueries = [];
algolia.startQueriesBatch();
algolia.addQueryInBatch( mvnAlgSettings.indexName, request.term, {
attributesToRetrieve: ['objectID', 'title', 'permalink', 'excerpt', 'content', 'date', 'featuredImage' , 'category', '_tags'],
hitsPerPage: mvnAlgSearchVars.postsPerPage
});
algoliaQueries.push( 'posts' );
if( self.indexTaxonomies() ){
jQuery.each(mvnAlgSettings.taxonomiesToIndex, function(index, element){
if( typeof element.indexName !== 'undefined' && element.indexName ){
algolia.addQueryInBatch( element.indexName, request.term, {
hitsPerPage: mvnAlgSearchVars.postsPerPage
});
algoliaQueries.push( index );
}
});
}
algolia.sendQueriesBatch(function(success, content) {
// forward 'response' to Algolia's callback in order to call it with up-to-date results
that.lastQuery = request.term;
that.searchCallback(success, content, response);
});
}
}
};
return self;
})(jQuery);
jQuery(document).ready(function($) {
mvnAlgoliaPrediction.initialize();
// Attach a jQuery UI autocomplete to every search input matching the
// configured input name (there may be several search forms on a page).
$("input[name='" + mvnAlgSearchVars.inputSearchName + "']").each(function(index){
$(this).autocomplete({
// minLength is the minimal number of input characters before starting showing
// the autocomplete
minLength: 1,
source: mvnAlgoliaPrediction.search,
// This function is executed when a suggestion is selected
select: function(event, ui) {
// Sets the text of the input textbox to the title of the object referenced
// by the selected list item
$(this).val(ui.item.label);
return false;
}
// Here we alter the standard behavior when rendering items in the list
});
// Custom item renderer: term hits (which carry a `taxonomy` field) and
// post hits use different templates from mvnAlgoliaPrediction.
// NOTE(review): _renderItem is a jQuery UI private extension point —
// verify it still exists when upgrading jQuery UI.
$(this).autocomplete().data("ui-autocomplete")._renderItem = function(ul, item) {
// ul is the unordered suggestion list
// item is a object in the data object that was send to the response function
// after the JSON request
// We append a custom formatted list item to the suggestion list
var itemHtml = '';
if( typeof item.taxonomy !== 'undefined' ){
itemHtml = mvnAlgoliaPrediction.getDisplayTerm(item);
}else{
itemHtml = mvnAlgoliaPrediction.getDisplayPost(item);
}
return $("<li></li>").data("item.autocomplete", item).append(itemHtml).appendTo(ul);
};
// Render menu just if index taxonomies is enabled
if( typeof mvnAlgSettings.indexTaxonomies !== 'undefined' && mvnAlgSettings.indexTaxonomies > 0 && typeof mvnAlgSettings.taxonomiesToIndex !== 'undefined' ){
// Custom menu renderer: insert a category header row whenever the item
// category changes (results arrive grouped per index, so one pass works).
$(this).autocomplete().data("ui-autocomplete")._renderMenu = function(ul, items) {
var that = this,
currentCategory = "";
$.each(items, function(index, item) {
if ( item.category && item.category !== currentCategory) {
ul.append("<li class='ui-autocomplete-category'><span>" + item.category + "</span></li>");
currentCategory = item.category;
}
that._renderItemData(ul, item);
});
};
}
});
});<file_sep><?php
/*
Plugin Name: Visualkicks - Dribbble Widget
Plugin URI: http://www.visualkicks.com/plugins
Description: Enables a Dribbble widget for use in our compatible Visualkicks themes
Version: 1.1
Author: Visualkicks
Author URI: http://www.visualkicks.com
Changelog
04/01/2014 Version 1.1
- removed the styling from the shortcode as it should be handled by the theme
- scripts are now only loaded if they are needed
/*-----------------------------------------------------------------------------------*/
// hook the widget registration into WordPress
add_action( 'widgets_init', 'vk_dribbble_widget' );
// registration callback (PHP class names are case-insensitive, so the
// capitalised handle resolves to the vk_dribbble_widget class below)
function vk_dribbble_widget() {
register_widget( 'VK_Dribbble_Widget' );
}
/*-----------------------------------------------------------------------------------*/
/*
/* Register Scripts
/*
/*-----------------------------------------------------------------------------------*/
/**
 * Register the jribbble library (printed later from wp_footer only when
 * a Dribbble widget actually rendered).
 *
 * BUG FIX: wp_register_script()'s $deps parameter must be an array of
 * handles; the old call passed the bare string 'jquery', which is not a
 * valid dependency list and breaks load-order resolution.
 */
function vk_dribble_register_scripts() {
	wp_register_script('widget-dribbble', plugins_url( '/jquery.jribbble.min.js', __FILE__ ), array('jquery'), '1.0', true);
}
add_action('wp_enqueue_scripts', 'vk_dribble_register_scripts');
/*-----------------------------------------------------------------------------------*/
/*
/* Print Scripts
/*
/*-----------------------------------------------------------------------------------*/
/**
 * Print the jribbble script from wp_footer, but only on pages where a
 * Dribbble widget actually rendered (the widget raises the flag).
 */
function vk_dribble_print_scripts() {
	global $add_dribbble_scripts;
	// guard clause: bail when no widget flagged itself on this page
	if ( ! $add_dribbble_scripts ) {
		return;
	}
	wp_print_scripts('widget-dribbble');
}
add_action('wp_footer', 'vk_dribble_print_scripts');
/*-----------------------------------------------------------------------------------*/
/*
/* Widget Setup
/*
/*-----------------------------------------------------------------------------------*/
// widget class
class vk_dribbble_widget extends WP_Widget {

	/**
	 * Register the widget.
	 *
	 * BUG FIX: replaces the PHP4-style constructor ($this->WP_Widget(...)),
	 * which is deprecated since WP 4.3 and unsupported on PHP 8, with
	 * parent::__construct().
	 */
	function __construct() {
		// settings
		$widget_ops = array(
			'classname' => 'vk_dribbble_widget',
			'description' => __('A widget that displays your latest dribbble shots.', 'framework')
		);
		// control
		$control_ops = array(
			'width' => 300,
			'height' => 350,
			'id_base' => 'vk_dribbble_widget'
		);
		// create
		parent::__construct( 'vk_dribbble_widget', __('Dribbble Shots', 'framework'), $widget_ops, $control_ops );
	}

	/**
	 * Front-end output: inline script that fetches the player's shots via
	 * jribbble and injects them into a per-instance container.
	 *
	 * @param array $args     Sidebar markup (before/after widget/title).
	 * @param array $instance Saved settings: title, username, postcount.
	 */
	function widget( $args, $instance ) {
		extract( $args );
		// isset() guards avoid notices on partially-saved instances
		$title = apply_filters( 'widget_title', isset( $instance['title'] ) ? $instance['title'] : '' );
		$vk_dribbble_username = isset( $instance['username'] ) ? $instance['username'] : '';
		$vk_dribbble_postcount = isset( $instance['postcount'] ) ? $instance['postcount'] : '';
		// raise the wp_footer flag so the jribbble script gets printed
		global $add_dribbble_scripts;
		$add_dribbble_scripts = true;
		// widget before
		echo $before_widget;
		// widget title
		if ( $title ) { echo $before_title . $title . $after_title; }
		// random id allows several instances of the widget on one page
		$id = rand(0,999); ?>
		<script type="text/javascript">
		jQuery(document).ready(function($){
			// esc_js() prevents option values from breaking out of the
			// single-quoted JS string literals below
			$.jribbble.getShotsByPlayerId('<?php echo esc_js( $vk_dribbble_username ); ?>', function (playerShots) {
				var html = [];
				$.each(playerShots.shots, function (i, shot) {
					html.push('<div><a href="' + shot.url + '"><img src="' + shot.image_url + '" alt="' + shot.title + '"></a></div>');
				});
				$('#dribbble_<?php echo $id; ?>').html(html.join(''));
			}, { page: 1, per_page: '<?php echo esc_js( $vk_dribbble_postcount ); ?>' });
		});
		</script>
		<div id="dribbble_<?php echo $id; ?>" class="dribbble"></div>
		<div class="clear"></div><?php
		// widget after
		echo $after_widget;
	}

	/**
	 * Sanitize and save the settings.
	 */
	function update( $new_instance, $old_instance ) {
		$instance = $old_instance;
		$instance['title'] = strip_tags( $new_instance['title'] );
		$instance['username'] = strip_tags( $new_instance['username'] );
		$instance['postcount'] = strip_tags( $new_instance['postcount'] );
		return $instance;
	}

	/**
	 * Admin form: title, dribbble username and number of shots.
	 */
	function form( $instance ) {
		// defaults
		$defaults = array(
			'title' => 'Latest Dribbble Shots',
			'username' => 'envato',
			'postcount' => '1',
		);
		$instance = wp_parse_args( (array) $instance, $defaults ); ?>
		<p>
			<label for="<?php echo $this->get_field_id( 'title' ); ?>"><?php _e('Title:', 'framework') ?></label>
			<input type="text" class="widefat" id="<?php echo $this->get_field_id( 'title' ); ?>" name="<?php echo $this->get_field_name( 'title' ); ?>" value="<?php echo $instance['title']; ?>" />
		</p>
		<p>
			<label for="<?php echo $this->get_field_id( 'username' ); ?>"><?php _e('Dribbble Username:', 'framework') ?></label>
			<input type="text" class="widefat" id="<?php echo $this->get_field_id( 'username' ); ?>" name="<?php echo $this->get_field_name( 'username' ); ?>" value="<?php echo $instance['username']; ?>" />
		</p>
		<p>
			<label for="<?php echo $this->get_field_id( 'postcount' ); ?>"><?php _e('Number of Shots:', 'framework') ?></label>
			<input type="text" class="widefat" id="<?php echo $this->get_field_id( 'postcount' ); ?>" name="<?php echo $this->get_field_name( 'postcount' ); ?>" value="<?php echo $instance['postcount']; ?>" />
		</p>
		<?php
	}
}
?><file_sep><?php
namespace MavenAlgolia\Core;
/**
 * Thin wrapper around the Algolia PHP client that pushes WordPress posts,
 * attachments and taxonomy terms into Algolia indices, and removes them again.
 *
 * All Algolia calls create a fresh \AlgoliaSearch\Client from the credentials
 * passed to the constructor; errors from the client are re-thrown unchanged.
 */
class Indexer {

    // Algolia API key used to authenticate every client call.
    private $apiKey = "";
    // Algolia application id.
    private $appId = "";

    /**
     * @param string $appId  Algolia application id.
     * @param string $apiKey Algolia API key.
     * @throws \Exception When either credential is missing or empty.
     */
    public function __construct ( $appId, $apiKey ) {
        /**
         * Help documentation
         * https://github.com/algolia/algoliasearch-client-php#indexing-parameters
         */
        // Example of a per-index settings structure (kept for reference):
        // self::$index = array(
        //     'my_index' => array(
        //         'attributesToIndex' => array(),
        //         'attributesForFaceting' => array(),
        //         'attributeForDistinct' => '',
        //         'ranking' => array(),
        //         'customRanking' => array(),
        //         'queryType' => 'prefixLast',
        //         'slaves' => array(),
        //         'master_index' => ''
        //     )
        // );
        if ( !$apiKey || !$appId ) {
            throw new \Exception( 'Missing or Invalid credentials' );
        }
        $this->apiKey = $apiKey;
        $this->appId = $appId;
    }

    /** @return string The configured Algolia API key. */
    public function getApiKey () {
        return $this->apiKey;
    }

    /** @return string The configured Algolia application id. */
    public function getAppId () {
        return $this->appId;
    }

    /**
     * Move an existing index.
     * @param string $tmpIndexName The name of the index to move.
     * @param string $indexName The new index name that will contain a copy of srcIndexName (destination is overwritten if it already exists).
     * @return boolean True on success, false when either name is empty.
     * @throws \Exception Client errors are re-thrown.
     */
    public function moveIndex( $tmpIndexName, $indexName ) {
        if( !empty( $tmpIndexName ) && !empty( $indexName ) ){
            // initialize API Client & Index
            $client = new \AlgoliaSearch\Client( $this->getAppId(), $this->getApiKey() );
            try {
                $client->moveIndex( $tmpIndexName, $indexName );
                return true;
            } catch ( \Exception $exc ) {
                throw $exc;
            }
        }
        return false;
    }

    /**
     * Index a single object.
     * @param string $indexName
     * @param array $object Record to save; must contain an 'objectID' key.
     * @return boolean True on success, false when 'objectID' is missing.
     * @throws \Exception Client errors are re-thrown.
     */
    public function indexObject( $indexName, $object ) {
        if( !isset( $object['objectID'] ) ){
            return false;
        }
        // initialize API Client & Index
        $client = new \AlgoliaSearch\Client( $this->getAppId(), $this->getApiKey() );
        $index = $client->initIndex( $indexName );
        try {
            // object contains the object to save
            // the object must contains an objectID attribute
            $index->saveObject( $object );
            return true;
        } catch ( \Exception $exc ) {
            throw $exc;
        }
        return false; // NOTE(review): unreachable — the try block either returns or throws.
    }

    /**
     * Index multiple objects in one batch call.
     * @param string $indexName
     * @param array $objects Array of records; each should contain an 'objectID'.
     * @return boolean True on success.
     * @throws \Exception Client errors are re-thrown.
     */
    public function indexObjects( $indexName, $objects ) {
        // initialize API Client & Index
        $client = new \AlgoliaSearch\Client( $this->getAppId(), $this->getApiKey() );
        $index = $client->initIndex( $indexName );
        try {
            // object contains the object to save
            // the object must contains an objectID attribute
            $index->saveObjects( $objects );
            return true;
        } catch ( \Exception $exc ) {
            throw $exc;
        }
        return false; // NOTE(review): unreachable — the try block either returns or throws.
    }

    /**
     * Remove a single object from the index.
     * @param string $indexName
     * @param integer $objectId
     * @return boolean True on success.
     * @throws \Exception Client errors are re-thrown.
     */
    public function deleteObject( $indexName, $objectId ) {
        // initialize API Client & Index
        $client = new \AlgoliaSearch\Client( $this->getAppId(), $this->getApiKey() );
        $index = $client->initIndex( $indexName );
        try {
            // Remove objects
            $index->deleteObject( $objectId );
            return true;
        } catch ( \Exception $exc ) {
            throw $exc;
        }
        return false; // NOTE(review): unreachable — the try block either returns or throws.
    }

    /**
     * Remove multiple objects from the index in one batch call.
     * @param string $indexName
     * @param integer[] $objectIds
     * @return boolean True on success.
     * @throws \Exception Client errors are re-thrown.
     */
    public function deleteObjects( $indexName, $objectIds ) {
        // initialize API Client & Index
        $client = new \AlgoliaSearch\Client( $this->getAppId(), $this->getApiKey() );
        $index = $client->initIndex( $indexName );
        try {
            // Remove objects
            $index->deleteObjects( $objectIds );
            return true;
        } catch ( \Exception $exc ) {
            throw $exc;
        }
        return false; // NOTE(review): unreachable — the try block either returns or throws.
    }

    /*
     * ------------------------------------------------------------
     * POSTS SECTION
     * ------------------------------------------------------------
     */

    /**
     * Convert a WP post object to an Algolia record: core fields, compound
     * fields, post metas, taxonomy terms, featured image and attached media,
     * as configured on the post-type descriptor.
     * @global \wpdb $wpdb
     * @param \WP_Post $post
     * @param Domain\PostType|string $type Post-type descriptor, or post-type slug to look it up.
     * @return array Algolia record; empty array when the post type is unknown.
     */
    public function postToAlgoliaObject( $post, $type = null ) {
        global $wpdb;
        if( empty( $type ) && !empty( $post->post_type ) ){
            $type = $post->post_type;
        }
        if( is_string( $type ) ){
            // TODO: Implement a better way to do this, maybe setting the post objects as a class attribute
            $postObjects = FieldsHelper::getPostTypesObject();
            if( isset( $postObjects[$type] ) ){
                $type = $postObjects[$type];
            }else{
                // If the post type object doesn't exist return an empty array
                return array();
            }
        }
        // select the identifier of this row
        $row = array();
        // Index WP Post table fields
        $fields = $type->getFields();
        if( is_array( $fields ) && !empty( $fields ) ){
            foreach( $fields as $field ){
                if( isset( $post->{$field->getId()} ) ){
                    $row[ $field->getLabel() ] = FieldsHelper::formatFieldValue( $post->{$field->getId()}, $field->getType() );
                }
            }
            unset( $field );
        }
        unset( $fields );
        // Index WP Compound fields
        $compoundFields = $type->getCompoundFields();
        if( is_array( $compoundFields ) && !empty( $compoundFields ) ){
            foreach( $compoundFields as $compoundField ){
                $row[ $compoundField->getLabel() ] = FieldsHelper::getCompoundFieldValue( $post, $compoundField->getId() );
            }
            unset( $compoundField );
        }
        unset( $compoundFields );
        // Index WP Post meta fields
        $metaFields = $type->getMetaFields();
        if( is_array( $metaFields ) && !empty( $metaFields ) ){
            foreach( $metaFields as $metaField ){
                $metaValue = get_post_meta( $post->ID, $metaField->getId(), $metaField->isSingle() );
                if( $metaValue !== FALSE ){
                    if( !is_array( $metaValue ) ){
                        $metaValue = FieldsHelper::formatFieldValue( $metaValue, $metaField->getType() );
                    }
                    $row[ $metaField->getLabel() ] = $metaValue;
                }
            }
            unset( $metaValue );
            unset( $metaField );
        }
        unset( $metaFields );
        // Index WP Taxonomies
        $taxonomies = $type->getTaxonomies();
        if( is_array( $taxonomies ) && !empty( $taxonomies ) ){
            $tags = array();
            $termNames = array();
            foreach( $taxonomies as $taxonomy ){
                $termNames = wp_get_post_terms( $post->ID, $taxonomy->getId(), array('fields' => 'names') );
                if( is_array( $termNames ) && !empty( $termNames ) ){
                    // $termNames = array_map( 'utf8_encode', $termNames );
                    if( $taxonomy->isTag() ){
                        $tags = array_unique(array_merge($tags, $termNames));
                    }
                    if( $taxonomy->forFaceting() ){
                        $row[ $taxonomy->getLabel() ] = $termNames;
                    }
                }
                // if there is no terms create the field with empty values
                elseif( $taxonomy->forFaceting() ){
                    $row[ $taxonomy->getLabel() ] = array();
                }
            }
            // we need to update the _tags field here since they are a single list and they are not divided by "group/type"
            if( is_array( $tags ) && !empty( $tags ) ){
                $row[ '_tags' ] = $tags;
            }
            unset( $termNames );
            unset( $taxonomy );
            unset( $tags );
        }
        unset( $taxonomies );
        // Index featured image if it was configured so
        $postThumbId = 0;
        if( $type->indexFeaturedImage() ){
            if( has_post_thumbnail( $post->ID ) ){
                $postThumbId = get_post_thumbnail_id( $post->ID );
                $row['featuredImage'] = $this->getImage( $postThumbId );
            } else {
                $row['featuredImage'] = '';
            }
        }
        // Index WP media
        $mediaTypes = $type->getMediaTypes();
        if( is_array( $mediaTypes ) && !empty( $mediaTypes ) ){
            $tags = array();
            //TODO: implement different methods or ways to index audio, videos and other files
            foreach( $mediaTypes as $mediaType ){
                // For now we just support images
                if( $mediaType !== 'image' ){
                    continue;
                }
                // Index WP media
                $whereExclude = '';
                // Exclude featured image if it is indexed separately
                // ($postThumbId comes from get_post_thumbnail_id(), i.e. an integer).
                if( !empty( $postThumbId ) ){
                    $whereExclude = " AND ID != {$postThumbId} ";
                }
                $query = $wpdb->prepare( "SELECT ID FROM {$wpdb->posts} WHERE post_type = 'attachment' AND post_parent = %d AND post_mime_type LIKE %s {$whereExclude} ORDER BY menu_order ASC", $post->ID, $mediaType . '%' );
                $children = $wpdb->get_col( $query );
                if( is_array( $children ) && !empty( $children ) ){
                    $mediaFields = array();
                    foreach ( $children as $childId ) {
                        $mediaFields[] = $this->getImage( $childId );
                    }
                    $row[ $mediaType ] = $mediaFields;
                    unset($childId);
                }else{
                    $row[ $mediaType ] = array();
                }
            }
            unset($mediaFields);
            unset( $children );
            unset( $query );
            unset( $mediaType );
        }
        unset( $mediaTypes );
        return $row;
    }

    /**
     * Build the indexable representation of an image attachment.
     * @global \wpdb $wpdb
     * @param int $attachId Attachment Post ID
     * @return array Image information (ID, alt, title, sizes, full URLs...);
     *               empty array when $attachId is empty.
     */
    public function getImage( $attachId ) {
        global $wpdb;
        if( empty($attachId) ){
            return array();
        }
        $uploadDir = wp_upload_dir();
        $uploadBaseUrl = $uploadDir['baseurl'];
        $image['ID'] = $attachId;
        //we will need to get the ALT info from Metas
        $image['alt'] = get_post_meta( $attachId, '_wp_attachment_image_alt', TRUE );
        $query = $wpdb->prepare( "SELECT post_title, post_content, post_excerpt, post_mime_type FROM {$wpdb->posts} WHERE ID = %d", $attachId );
        $attachment = $wpdb->get_row( $query );
        if( $attachment ){
            $image['title'] = $attachment->post_title;
            $image['description'] = $attachment->post_content;
            $image['caption'] = $attachment->post_excerpt;
            $image['mime_type'] = $attachment->post_mime_type;
        }
        unset( $query );
        unset( $attachment );
        $attachmentMeta = get_post_meta( $attachId, '_wp_attachment_metadata', TRUE );
        if( is_array($attachmentMeta) && !empty( $attachmentMeta ) ){
            $image['width'] = $attachmentMeta['width'];
            $image['height'] = $attachmentMeta['height'];
            $image['file'] = sprintf('%s/%s', $uploadBaseUrl, $attachmentMeta['file'] );
            $image['sizes'] = $attachmentMeta['sizes'];
            if( isset( $image['sizes'] ) && is_array( $image['sizes'] ) ){
                // Only index a whitelist of sizes to keep the record small (filterable).
                $sizesToIndex = apply_filters( 'ma_image_sizes_to_index', array('thumbnail', 'medium', 'large') );
                foreach ( $image['sizes'] as $size => &$sizeAttrs ) {
                    if( !in_array( $size, $sizesToIndex ) ){
                        unset( $image['sizes'][$size] );
                        continue;
                    }
                    if( isset( $sizeAttrs['file'] ) && $sizeAttrs['file'] ){
                        // Size entries store only the basename; rebuild the full URL
                        // from the upload base URL and the original file's directory.
                        $baseFileUrl = str_replace( wp_basename($attachmentMeta['file']), '', $attachmentMeta['file']);
                        $sizeAttrs['file'] = sprintf( '%s/%s%s', $uploadBaseUrl, $baseFileUrl, $sizeAttrs['file']);
                    }
                }
            }
            unset($attachmentMeta);
        }
        return $image;
    }

    /**
     * Remove from the index every post of the given types whose status is NOT
     * 'publish' (any admin-visible status except publish).
     * @global \wpdb $wpdb
     * @param string $indexName
     * @param string[]|string $types Post type slug(s).
     * @param int $postsPerPage How many posts per page (-1 = no limit).
     * @param int $offset Where to start.
     * @return int Number of posts removed from the index.
     * @throws \Exception When the index name is missing, or on client errors.
     */
    public function removeIndexData ( $indexName, $types, $postsPerPage = -1, $offset = 0 ) {
        // WE will use $wpdb to make the calls faster
        global $wpdb;
        if ( ! $indexName ) {
            throw new \Exception( 'Missing or Invalid Index Name' );
        }
        if( !is_array( $types ) ){
            $types = array( $types );
        }
        // NOTE(review): $postTypes/$postStatuses are interpolated into SQL without
        // placeholders; assumes they only come from trusted internal configuration.
        $postTypes = implode( "','", $types );
        $postStatuses = implode( "','", array_diff( get_post_stati( array( 'show_in_admin_status_list' => TRUE ) ), array( 'publish' ) ) );
        $limit = '';
        if( (int)$postsPerPage > 0 ){
            $limit = sprintf( "LIMIT %d, %d", $offset, $postsPerPage );
        }
        $query = "SELECT ID FROM {$wpdb->posts} WHERE post_status IN ('{$postStatuses}') AND post_type IN ( '{$postTypes}' ) {$limit}";
        $posts = $wpdb->get_results( $query );
        $totalRemoved = 0;
        if ( $posts ) {
            $batch = array();
            // iterate over results and send them by batch of 10000 elements
            foreach ( $posts as $post ) {
                // select the identifier of this row
                array_push( $batch, $post->ID );
                $totalRemoved++;
            }
            unset( $post );
            unset( $posts );
            try {
                // Remove objects
                $this->deleteObjects( $indexName, $batch );
            } catch ( \Exception $exc ) {
                throw $exc;
            }
            unset( $batch );
        }
        return $totalRemoved;
    }

    /**
     * Index all published posts of one post type.
     * @global \wpdb $wpdb
     * @param string $indexName
     * @param Domain\PostType $types Post-type descriptor (a single object, despite the plural name).
     * @param int $postsPerPage How many posts per page (-1 = no limit).
     * @param int $offset Where to start.
     * @return int Number of posts indexed.
     * @throws \Exception When the index name is missing, or on client errors.
     */
    public function indexData ( $indexName, $types, $postsPerPage = -1, $offset = 0 ) {
        // WE will use $wpdb to make the calls faster
        global $wpdb;
        if ( ! $indexName ) {
            throw new \Exception( 'Missing or Invalid Index Name' );
        }
        $limit = '';
        if( (int)$postsPerPage > 0 ){
            $limit = sprintf( "LIMIT %d, %d", $offset, $postsPerPage );
        }
        $postFields = $types->getFieldsIdsForQuery();
        $query = $wpdb->prepare( "SELECT {$postFields} FROM {$wpdb->posts} WHERE post_status IN ('publish') AND post_type = %s {$limit}", $types->getType() );
        $posts = $wpdb->get_results( $query );
        $totalIndexed = 0;
        if ( $posts ) {
            $batch = array();
            // iterate over results and send them by batch of 10000 elements
            foreach ( $posts as $post ) {
                // select the identifier of this row
                $row = $this->postToAlgoliaObject( $post, $types );
                array_push( $batch, $row );
                $totalIndexed++;
            }
            unset($row);
            unset( $post );
            unset( $posts );
            try {
                $this->indexObjects($indexName, $batch);
            } catch ( \Exception $exc ) {
                throw $exc;
            }
            unset( $batch );
        }
        unset( $postFields );
        return $totalIndexed;
    }

    /*
     * ------------------------------------------------------------
     * END POSTS SECTION
     * ------------------------------------------------------------
     */

    /*
     * ------------------------------------------------------------
     * TAXONOMIES SECTION
     * ------------------------------------------------------------
     */

    /**
     * Convert a WP term object to an Algolia record.
     * @param object $term Row joined from the terms/term_taxonomy tables.
     * @param Domain\Taxonomy|string $taxonomy Taxonomy descriptor, or slug to look it up.
     * @return array
     */
    public function termToAlgoliaObject( $term, $taxonomy = null ) {
        if( empty( $taxonomy ) && !empty( $term->taxonomy ) ){
            $taxonomy = $term->taxonomy;
        }
        if( is_string( $taxonomy ) ){
            $taxonomy = FieldsHelper::getTaxonomyObjectByType( $taxonomy );
        }
        // select the identifier of this row
        $row = array();
        // Index WP Term and Taxonomy tables fields
        $fields = $taxonomy->getFields();
        if( is_array( $fields ) && !empty( $fields ) ){
            foreach( $fields as $field ){
                if( isset( $term->{$field->getId()} ) ){
                    $row[ $field->getLabel() ] = FieldsHelper::formatFieldValue( $term->{$field->getId()}, $field->getType() );
                }
            }
            unset( $field );
        }
        unset( $fields );
        // Index Taxonomy Compound fields
        $compoundFields = $taxonomy->getCompoundFields();
        if( is_array( $compoundFields ) && !empty( $compoundFields ) ){
            foreach( $compoundFields as $compoundField ){
                $row[ $compoundField->getLabel() ] = FieldsHelper::getTaxCompoundFieldValue( $term, $compoundField->getId() );
            }
            unset( $compoundField );
        }
        unset( $compoundFields );
        // Index Term meta fields (not implemented yet; kept for reference)
        // $metaFields = $taxonomy->getMetaFields();
        // if( is_array( $metaFields ) && !empty( $metaFields ) ){
        //     foreach( $metaFields as $metaField ){
        //         $metaValue = get_post_meta( $taxonomy->ID, $metaField->getId(), $metaField->isSingle() );
        //         if( $metaValue !== FALSE ){
        //             if( !is_array( $metaValue ) ){
        //                 $metaValue = FieldsHelper::formatFieldValue( $metaValue, $metaField->getType() );
        //             }
        //             $row[ $metaField->getLabel() ] = $metaValue;
        //         }
        //     }
        //     unset( $metaValue );
        //     unset( $metaField );
        // }
        // unset( $metaFields );
        return $row;
    }

    /**
     * Index all non-empty terms of a taxonomy.
     * @global \wpdb $wpdb
     * @param string $indexName
     * @param Domain\Taxonomy $taxonomy Taxonomy descriptor.
     * @param int $postsPerPage How many terms per page (-1 = no limit).
     * @param int $offset Where to start.
     * @return int Number of terms indexed.
     * @throws \Exception When the index name is missing, or on client errors.
     */
    public function indexTaxonomyData ( $indexName, $taxonomy, $postsPerPage = -1, $offset = 0 ) {
        // WE will use $wpdb to make the calls faster
        global $wpdb;
        if ( ! $indexName ) {
            throw new \Exception( 'Missing or Invalid Index Name' );
        }
        $limit = '';
        if( (int)$postsPerPage > 0 ){
            $limit = sprintf( "LIMIT %d, %d", $offset, $postsPerPage );
        }
        $termFields = $taxonomy->getFieldsIdsForQuery();
        // Minimum post count a term must have to be indexed (0 keeps empty terms too).
        $showEmpty = 0;
        $query = $wpdb->prepare( "SELECT {$termFields} FROM {$wpdb->terms} INNER JOIN {$wpdb->term_taxonomy} ON {$wpdb->terms}.term_id = {$wpdb->term_taxonomy}.term_id WHERE {$wpdb->term_taxonomy}.taxonomy = %s AND {$wpdb->term_taxonomy}.count >= %d {$limit}", $taxonomy->getType(), $showEmpty );
        $terms = $wpdb->get_results( $query );
        $totalIndexed = 0;
        if ( $terms ) {
            $batch = array();
            // iterate over results and send them by batch of 10000 elements
            foreach ( $terms as $term ) {
                // select the identifier of this row
                $row = $this->termToAlgoliaObject( $term, $taxonomy );
                array_push( $batch, $row );
                $totalIndexed++;
            }
            unset($row);
            unset( $term );
            unset( $terms );
            // echo json_encode( $batch );
            // die;
            //
            try {
                $this->indexObjects($indexName, $batch);
            } catch ( \Exception $exc ) {
                throw $exc;
            }
            unset( $batch );
        }
        unset( $termFields );
        return $totalIndexed;
    }

    /*
     * ------------------------------------------------------------
     * END TAXONOMIES SECTION
     * ------------------------------------------------------------
     */
}
"JavaScript",
"PHP"
] | 11 | PHP | asm-products/really-good-emails-wordpress | b3cb016656d26550dfed0faafda5d05785d13c87 | 4980985ddb8bc37f89d0511232e58e33b1d5388a |
refs/heads/master | <file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
namespace deckOfCards.Controllers
{
public class HomeController : Controller
{
    // Full 52-card deck; each entry is suit letter + rank (ex: "s4" = Spade 4).
    String[] Initialdeck = { "c2", "c3", "c4", "c5", "c6", "c7", "c8", "c9", "c10", "cJ", "cQ", "cK", "cA", "d2", "d3", "d4", "d5", "d6", "d7", "d8", "d9", "d10", "dJ", "dQ", "dK", "dA", "h2", "h3", "h4", "h5", "h6", "h7", "h8", "h9", "h10", "hJ", "hQ", "hK", "hA", "s2", "s3", "s4", "s5", "s6", "s7", "s8", "s9", "s10", "sJ", "sQ", "sK", "sA" };

    // Empty list used to (re)initialize the dealt-cards session state.
    List<string> InitialDealtCards = new List<string>(new string[] { });

    // Single shared RNG: creating "new Random()" per request uses a time-based
    // seed, so requests arriving in the same clock tick would repeat the same
    // shuffle. System.Random is not thread-safe, hence all access is locked.
    private static readonly Random Rng = new Random();
    private static readonly object RngLock = new object();

    /// <summary>
    /// Show the current deck and dealt cards, initializing the session state
    /// on the first visit.
    /// </summary>
    public ActionResult Index()
    {
        if (Session["Deck"] == null) // first visit in this session
        {
            Session["Deck"] = Initialdeck;   // fresh, ordered deck
            Session["ErrorMessage"] = null;
        }
        if (Session["DealtCards"] == null) // first visit in this session
        {
            Session["DealtCards"] = InitialDealtCards; // no cards dealt yet
        }
        // ViewData is used to pass data from controller to view instead of
        // having the view read the session variables directly.
        ViewData["DeckArray"] = Session["Deck"];
        ViewData["DealtCardsArray"] = Session["DealtCards"];
        return View();
    }

    /// <summary>
    /// Shuffle the remaining deck in place using the Fisher-Yates algorithm.
    /// Algorithm based on Java code from Wikipedia:
    /// http://en.wikipedia.org/wiki/Fisher-Yates_shuffle
    /// </summary>
    public ActionResult Shuffle()
    {
        string[] deck = (String[])Session["Deck"];
        if (deck == null)
        {
            // Session not initialized yet (e.g. Shuffle hit directly);
            // redirecting lets Index set up the state.
            return RedirectToAction("Index");
        }
        if (deck.Length != 0) // there are cards left to shuffle
        {
            lock (RngLock)
            {
                for (int n = deck.Length - 1; n > 0; --n)
                {
                    int k = Rng.Next(n + 1);
                    string temp = deck[n];
                    deck[n] = deck[k];
                    deck[k] = temp;
                }
            }
            Session["Deck"] = deck; // store the shuffled deck
        }
        else // no more cards in the deck
        {
            Session["ErrorMessage"] = "No more cards in deck, click Start Again";
        }
        return RedirectToAction("Index");
    }

    /// <summary>
    /// Move the top card of the deck onto the dealt-cards list.
    /// </summary>
    public ActionResult DealOneCard()
    {
        string[] deck = (String[])Session["Deck"];
        List<string> DealtCards = (List<string>)Session["DealtCards"];
        if (deck == null || DealtCards == null)
        {
            // Session not initialized yet; redirecting lets Index set it up.
            return RedirectToAction("Index");
        }
        if (deck.Length != 0) // there are cards left in the deck
        {
            DealtCards.Add(deck[0]);                  // deal the top card
            Session["DealtCards"] = DealtCards;       // updated dealt cards
            Session["Deck"] = deck.Skip(1).ToArray(); // remaining deck
        }
        else // no more cards in the deck
        {
            Session["ErrorMessage"] = "No more cards in deck, click Start Again";
        }
        return RedirectToAction("Index");
    }

    /// <summary>
    /// Reset the deck, the dealt cards and the error message to their
    /// initial state.
    /// </summary>
    public ActionResult Clear()
    {
        Session["Deck"] = Initialdeck;
        Session["DealtCards"] = InitialDealtCards;
        Session["ErrorMessage"] = null;
        return RedirectToAction("Index");
    }
}
} | 129ec9be597cff9e8a620950363bf93859f302d3 | [
"C#"
] | 1 | C# | jawadalami/deckOfCards | 97b05d048ffb685647cd0b04c9121d05f254e4b1 | c627ae8ba0e25bbc21f41dd0b243cec8643d5767 |
refs/heads/master | <file_sep># API Documentation
## 1. Sequence counts
Description
```
Returns number of sequences per day by location
```
Endpoint: https://api.outbreak.info/genomics/sequence-count
Parameters
* `location_id` (Optional). If not specified, the global total counts are returned.
* `cumulative` (Optional). If `true` returns the cumulative number of sequences till date.
* `subadmin` (Optional). If `true` and `cumulative`=`true`, returns the cumulative number of sequences for the immediate lower admin level.
Examples,
Number of sequences per day globally.
https://api.outbreak.info/genomics/sequence-count
Cumulative number of sequences for every US state.
https://api.outbreak.info/genomics/sequence-count?location_id=USA&cumulative=true&subadmin=true
Daily number of sequences for California
https://api.outbreak.info/genomics/sequence-count?location_id=USA_US-CA
## 2. [DEPRECATED] Global daily prevalence of a PANGO lineage
```
Returns the global daily prevalence of a PANGO lineage.
DEPRECATED: replaced by `prevalence-by-location` with no location specified.
```
Endpoint: https://api.outbreak.info/genomics/global-prevalence
Parameters
* `pangolin_lineage` (Optional). Returns global value if not specified.
* `mutations` (Optional). Comma separated list of mutations.
* `cumulative` (Optional). If `true` returns the cumulative global prevalence since the first day of detection.
Examples,
Global daily prevalence of B.1.1.7 lineage
https://api.outbreak.info/genomics/global-prevalence?pangolin_lineage=b.1.1.7
Global daily prevalence of B.1.1.7 lineage with S:E484K mutation
https://api.outbreak.info/genomics/global-prevalence?pangolin_lineage=b.1.1.7&mutations=S:E484K
Cumulative global prevalence of B.1.1.7
https://api.outbreak.info/genomics/global-prevalence?pangolin_lineage=b.1.1.7&cumulative=true
## 3. Daily prevalence of a PANGO lineage by location
```
Returns the daily prevalence of a PANGO lineage by location
```
Endpoint: https://api.outbreak.info/genomics/prevalence-by-location
Parameters
* `pangolin_lineage` (Required). List of lineages separated by `,`
* `location_id` (Optional).
* `mutations` (Optional). List of mutations separated by `AND`
* `cumulative` (Optional). If `true` returns the cumulative global prevalence since the first day of detection.
Examples,
Global daily prevalence of B.1.1.7 lineage in the United States
https://api.outbreak.info/genomics/prevalence-by-location?pangolin_lineage=b.1.1.7&location_id=USA
Global daily prevalence of B.1.1.7 lineage with S:E484K mutation in the United States
https://api.outbreak.info/genomics/prevalence-by-location?pangolin_lineage=b.1.1.7&mutations=S:E484K&location_id=USA
Cumulative prevalence of B.1.1.7 in the United States
https://api.outbreak.info/genomics/prevalence-by-location?pangolin_lineage=b.1.1.7&cumulative=true&location_id=USA
## 4. Cumulative prevalence of a PANGO lineage by the immediate admin level of a location
Endpoint: https://api.outbreak.info/genomics/lineage-by-sub-admin-most-recent
Parameters:
* `pangolin_lineage` (Required). List of lineages separated by `,`
* `mutations` (Optional). List of mutations separated by `AND`.
* `location_id` (Optional). If not specified, returns cumulative prevalence at the country level globally.
* `ndays` (Optional). Specify the number of days before the current date over which to calculate cumulative counts. If not specified, there is no limit on the window.
* `detected` (Optional). If `true` returns only the name of the location if at least one sequence of the variant has been detected in the given time window.
## 5. Most recent collection date by location
Endpoint: https://api.outbreak.info/genomics/most-recent-collection-date-by-location
Parameters:
* `pangolin_lineage` (Required).
* `mutations` (Optional). Comma separated list of mutations.
* `location_id` (Optional). If not specified, return most recent date globally.
Examples,
Most recent collection date of B.1.1.7 PANGO lineage globally.
https://api.outbreak.info/genomics/most-recent-collection-date-by-location?pangolin_lineage=b.1.1.7
Most recent collection date of B.1.1.7 PANGO lineage with S:E484K globally.
https://api.outbreak.info/genomics/most-recent-collection-date-by-location?pangolin_lineage=b.1.1.7&mutations=S:E484K
Most recent collection date of B.1.1.7 PANGO lineage in California
https://api.outbreak.info/genomics/most-recent-collection-date-by-location?pangolin_lineage=b.1.1.7&location_id=USA_US-CA
## 6. Most recent submission date by location
Endpoint: https://api.outbreak.info/genomics/most-recent-submission-date-by-location
Parameters:
* `pangolin_lineage` (Required).
* `mutations` (Optional). Comma separated list of mutations.
* `location_id` (Optional). If not specified, return most recent date globally.
Examples,
Most recent submission date of B.1.1.7 PANGO lineage globally.
https://api.outbreak.info/genomics/most-recent-submission-date-by-location?pangolin_lineage=b.1.1.7
Most recent submission date of B.1.1.7 PANGO lineage with S:E484K globally.
https://api.outbreak.info/genomics/most-recent-submission-date-by-location?pangolin_lineage=b.1.1.7&mutations=S:E484K
Most recent submission date of B.1.1.7 PANGO lineage in California
https://api.outbreak.info/genomics/most-recent-submission-date-by-location?pangolin_lineage=b.1.1.7&location_id=USA_US-CA
## 7. Get details of a mutation
Endpoint: https://api.outbreak.info/genomics/mutation-details
Parameters:
* `mutations` (Required). Comma separated list of mutations.
Examples,
Get details of S:E484K and S:N501Y
https://api.outbreak.info/genomics/mutation-details?mutations=S:E484K,S:N501Y
## 8. Get prevalence of a mutation across lineages per location
Endpoint: https://api.outbreak.info/genomics/mutations-by-lineage
Parameters
* `mutations` (Optional). List of mutations separated by `AND`.
* `location_id` (Optional). If not specified, return most recent date globally.
* `pangolin_lineage` (Optional). If not specified, returns all Pango lineages containing that mutation.
* `frequency` (Optional). Minimum frequency threshold for the prevalence of a mutation in a lineage.
Examples,
Get prevalence of S:E484K across all lineages in the U.S.
https://api.outbreak.info/genomics/mutations-by-lineage?mutations=S:E484K&location_id=USA
## 9. Get prevalence of mutations in a lineage above a frequency threshold
Endpoint: https://api.outbreak.info/genomics/lineage-mutations
Parameters
* `pangolin_lineage` (Required). List of lineages separated by `OR`. A list of mutations can be added as, `Lineage 1 OR Lineage 2 OR Lineage 3 AND Mutation 1 AND Mutation 2`. Multiple queries can be separated by `,`.
* `frequency` (Optional, default: 0.8). A number between 0 and 1 specifying the threshold above which to return mutations.
Examples,
Get all mutations in A.27 lineage.
https://api.outbreak.info/genomics/lineage-mutations?pangolin_lineage=A.27
## 10. Return the daily lag between collection and submission dates by location
Endpoint: https://api.outbreak.info/genomics/collection-submission
Parameters:
* `location_id` (Optional). If not specified, return lag globally.
## 11. Match lineage name using wildcards.
Endpoint: https://api.outbreak.info/genomics/lineage
Parameters:
* `name` (Required). Supports wildcards.
Examples,
Get all lineages that start with b.1
https://api.outbreak.info/genomics/lineage?name=b.1.*
## 12. Match location name using wildcards.
Endpoint: https://api.outbreak.info/genomics/location
Parameters:
* `name` (Required). Supports wildcards.
Examples,
Get all locations that start with united
https://api.outbreak.info/genomics/location?name=united*
## 13. Get location details using ID.
Parameters:
* `id` (Required).
Examples,
Get location details using id: `USA_US-CA`
https://api.outbreak.info/genomics/location-lookup?id=USA_US-CA
## 14. Match mutations using wildcards.
Endpoint: https://api.outbreak.info/genomics/mutations
Parameters:
* `name` (Required)
Examples,
Get all mutations that start with S:E484
https://api.outbreak.info/genomics/mutations?name=S:E484*
## 15. Get prevalence of all lineages over time for a location (version 2)
Endpoint: https://api.outbreak.info/genomics/v2/prevalence-by-location-all-lineages
Parameters:
* `location_id` (Required)
* `other_threshold` (Default: `0.05`) Minimum prevalence threshold below which lineages must be accumulated under "Other".
* `nday_threshold` (Default: `10`) Minimum number of days in which the prevalence of a lineage must be below `other_threshold` to be accumulated under "Other".
* `ndays` (Default: `180`) The number of days before the current date to be used as a window to accumulate lineages under "Other".
* `other_exclude` Comma separated lineages that are NOT to be included under "Other" even if the conditions specified by the three thresholds above are met.
* `cumulative` (Default: `false`) If `true` return the cumulative prevalence.
* `window` (Default: `none`) If `true` return the cumulative prevalence.
* `max_date` (Default: `none`) Newest date collected filter, should be in ISO 8601 YYYY-MM-DD format, the filtering should be inclusive (as in <= (less than or equal to) and >= (greater than or equal to) of the min_date and max_date.
* `min_date` (Default: `none`) Oldest date collected filter, should be in ISO 8601 YYYY-MM-DD format, the filtering should be inclusive (as in <= (less than or equal to) and >= (greater than or equal to) of the min_date and max_date.
Examples,
Give me the prevalence of all lineages in the U.S., classify lineages that are below 0.03 prevalence for at least 5 days over the last 60 days as "Other", and exclude p.1 from "Other" even if the conditions for "Other" are satisfied.
https://api.outbreak.info/genomics/v2/prevalence-by-location-all-lineages?location_id=USA&other_threshold=0.03&nday_threshold=5&ndays=60&other_exclude=p.1&max_date=2023-01-01&min_date=2022-12-01
<file_sep>from .base import BaseHandler
from tornado import gen
import pandas as pd
from .util import create_nested_mutation_query, calculate_proportion, parse_location_id_to_query, get_total_hits
import re
class LineageByCountryHandler(BaseHandler):
    """Aggregate matching sequence counts per country for an optional
    set of PANGO lineages and/or mutations (comma-separated params)."""

    @gen.coroutine
    def _get(self):
        raw_lineages = self.get_argument("pangolin_lineage", None)
        raw_mutations = self.get_argument("mutations", None)

        # Comma-separated query parameters -> lists (empty list when absent).
        lineages = [] if raw_lineages is None else raw_lineages.split(",")
        mutations = [] if raw_mutations is None else raw_mutations.split(",")

        # Filtered terms aggregation: restrict to the requested
        # lineages/mutations, then bucket the matches by country.
        query = {
            "aggs": {
                "prevalence": {
                    "filter": create_nested_mutation_query(
                        lineages=lineages, mutations=mutations
                    ),
                    "aggs": {
                        "country": {
                            "terms": {
                                "field": "country",
                                "size": self.size
                            }
                        }
                    }
                }
            }
        }

        resp = yield self.asynchronous_fetch(query)
        return resp
class LineageByDivisionHandler(BaseHandler):
    """Aggregate matching sequence counts per division (state/province)
    within an optional country, for an optional set of PANGO lineages
    and/or mutations (comma-separated params)."""

    @gen.coroutine
    def _get(self):
        query_pangolin_lineage = self.get_argument("pangolin_lineage", None)
        query_country = self.get_argument("country", None)
        query_mutations = self.get_argument("mutations", None)
        # Filtered terms aggregation; the filter is filled in below once the
        # lineage/mutation parameters have been parsed.
        query = {
            "aggs": {
                "prevalence": {
                    "filter": {},
                    "aggs": {
                        "division": {
                            "terms": {
                                "field": "division",
                                "size": self.size
                            }
                        }
                    }
                }
            }
        }
        # Comma-separated query parameters -> lists (empty list when absent).
        query_mutations = query_mutations.split(",") if query_mutations is not None else []
        query_pangolin_lineage = query_pangolin_lineage.split(",") if query_pangolin_lineage is not None else []
        query_obj = create_nested_mutation_query(country=query_country, lineages=query_pangolin_lineage, mutations=query_mutations)
        query["aggs"]["prevalence"]["filter"] = query_obj
        # (removed stray debug `print(query)` that was left in from development)
        resp = yield self.asynchronous_fetch(query)
        return resp
# Calculate total number of sequences with a particular lineage in a country
class LineageAndCountryHandler(BaseHandler):
    """Return the total number of sequences matching a set of PANGO
    lineages and/or mutations within a given country."""

    @gen.coroutine
    def _get(self):
        country = self.get_argument("country", None)
        raw_lineages = self.get_argument("pangolin_lineage", None)
        raw_mutations = self.get_argument("mutations", None)

        # Comma-separated query parameters -> lists (empty list when absent).
        lineages = [] if raw_lineages is None else raw_lineages.split(",")
        mutations = [] if raw_mutations is None else raw_mutations.split(",")

        es_query = {
            "query": create_nested_mutation_query(
                country=country, lineages=lineages, mutations=mutations
            )
        }
        resp = yield self.asynchronous_fetch(es_query)
        return resp
# Calculate total number of sequences with a particular lineage in a division
class LineageAndDivisionHandler(BaseHandler):
    @gen.coroutine
    def _get(self):
        """Count sequences matching a lineage/mutation set within a division."""
        country = self.get_argument("country", None)
        division = self.get_argument("division", None)
        lineage_arg = self.get_argument("pangolin_lineage", None)
        mutation_arg = self.get_argument("mutations", None)
        # Comma-separated arguments become lists; absent arguments become [].
        lineages = [] if lineage_arg is None else lineage_arg.split(",")
        mutations = [] if mutation_arg is None else mutation_arg.split(",")
        query = {
            "query": create_nested_mutation_query(
                country=country,
                division=division,
                lineages=lineages,
                mutations=mutations,
            )
        }
        resp = yield self.asynchronous_fetch(query)
        return resp
class LineageHandler(BaseHandler):
    """Look up lineage names matching a wildcard pattern.

    GET arguments:
        name: wildcard pattern matched against "pangolin_lineage".
        size: optional cap on result count; when given, results are sorted
            by total_count (descending) before truncation.
    """
    @gen.coroutine
    def _get(self):
        query_str = self.get_argument("name", None)
        size = self.get_argument("size", None)
        query = {
            "size": 0,
            "query": {
                "wildcard": {
                    "pangolin_lineage": {
                        "value": query_str
                    }
                }
            },
            "aggs": {
                "lineage": {
                    "terms": {
                        "field": "pangolin_lineage",
                        "size": 10000
                    }
                }
            }
        }
        resp = yield self.asynchronous_fetch(query)
        path_to_results = ["aggregations", "lineage", "buckets"]
        buckets = resp
        for i in path_to_results:
            buckets = buckets[i]
        flattened_response = [{
            "name": i["key"],
            "total_count": i["doc_count"]
        } for i in buckets]
        if size:
            try:
                size = int(size)
            # Narrowed from a blanket "except Exception"; int() raises only
            # these for a bad argument. Also fixed the error-message typo
            # ("Invalide" -> "Invalid").
            except (TypeError, ValueError):
                return {"success": False, "results": [], "errors": "Invalid size value"}
            flattened_response = sorted(flattened_response, key=lambda x: -x["total_count"])
            flattened_response = flattened_response[:size]
        resp = {"success": True, "results": flattened_response}
        return resp
class LineageMutationsHandler(BaseHandler):
    """Return characteristic mutations for one or more lineage queries.

    The "pangolin_lineage" argument is a comma-separated list of queries of
    the form "L1 OR L2 AND mut1 AND mut2": the first " AND "-separated
    element is a group of lineages OR'd together; every later element is a
    required mutation. For each query, mutations observed in at least
    ``frequency`` (default 0.8) of the matching genomes are returned with
    gene/codon annotations.
    """
    # Canonical capitalization of gene symbols (keys arrive lowercased).
    gene_mapping = {
        "orf1a" : "ORF1a",
        "orf1b" : "ORF1b",
        "s" : "S",
        "orf3a" : "ORF3a",
        "e": "E",
        "m" : "M",
        "orf6": "ORF6",
        "orf7a" : "ORF7a",
        "orf7b" : "ORF7b",
        "orf8" : "ORF8",
        "n" : "N",
        "orf10" : "ORF10"
    }
    @gen.coroutine
    def _get(self):
        pangolin_lineage = self.get_argument("pangolin_lineage", None)
        frequency = self.get_argument("frequency", None)
        # Idiom fix: compare against None with "is not None", not "!= None".
        frequency = float(frequency) if frequency is not None else 0.8
        dict_response = {}
        # Query structure: Lineage 1 OR Lineage 2 OR Lineage 3 AND Mutation 1 AND Mutation 2, Lineage 4 AND Mutation 2, Lineage 5 ....
        for query_lineage in pangolin_lineage.split(","):
            # Nested agg counts each mutation; reverse_nested counts the
            # genomes (root documents) carrying it.
            query = {
                "size": 0,
                "query": {
                },
                "aggs": {
                    "mutations": {
                        "nested": {
                            "path": "mutations"
                        },
                        "aggs": {
                            "mutations": {
                                "terms": {
                                    "field": "mutations.mutation",
                                    "size": 10000
                                },
                                "aggs": {
                                    "genomes": {
                                        "reverse_nested": {}
                                    }
                                }
                            }
                        }
                    }
                }
            }
            query_lineage_split = query_lineage.split(" AND ")
            query_mutations = []
            query_pangolin_lineage = query_lineage_split[0].split(" OR ") # First parameter always lineages separated by commas
            if len(query_lineage_split) > 1:
                query_mutations = query_lineage_split[1:] # First parameter is always lineage
            query["query"] = create_nested_mutation_query(lineages = query_pangolin_lineage, mutations = query_mutations)
            resp = yield self.asynchronous_fetch(query)
            path_to_results = ["aggregations", "mutations", "mutations", "buckets"]
            buckets = resp
            for i in path_to_results:
                buckets = buckets[i]
            flattened_response = [{
                "mutation": i["key"],
                "mutation_count": i["genomes"]["doc_count"],
                "lineage_count": get_total_hits(resp),
                "lineage": query_lineage
            } for i in buckets]
            if len(flattened_response) > 0:
                # Annotate each mutation with gene, reference/alternate amino
                # acids, codon coordinates and mutation type.
                df_response = (
                    pd.DataFrame(flattened_response)
                    .assign(
                        gene = lambda x: x["mutation"].apply(lambda k: self.gene_mapping[k.split(":")[0]] if k.split(":")[0] in self.gene_mapping else k.split(":")[0]),
                        ref_aa = lambda x: x["mutation"].apply(lambda k: re.findall("[A-Za-z*]+", k.split(":")[1])[0] if "DEL" not in k and "del" not in k and "_" not in k else k).str.upper(),
                        alt_aa = lambda x: x["mutation"].apply(lambda k: re.findall("[A-Za-z*]+", k.split(":")[1])[1] if "DEL" not in k and "del" not in k and "_" not in k else k.split(":")[1]).str.upper(),
                        codon_num = lambda x: x["mutation"].apply(lambda k: int(re.findall("[0-9]+", k.split(":")[1])[0])),
                        codon_end = lambda x: x["mutation"].apply(lambda k: int(re.findall("[0-9]+", k.split(":")[1])[1]) if "/" in k and ("DEL" in k or "del" in k) else None),
                        type = lambda x: x["mutation"].apply(lambda k: "deletion" if "DEL" in k or "del" in k else "substitution")
                    )
                )
                # Drop records where reference and alternate AA are the same.
                df_response = df_response[df_response["ref_aa"] != df_response["alt_aa"]]
                df_response.loc[:, "prevalence"] = df_response["mutation_count"]/df_response["lineage_count"]
                # For deletions spanning codons: change length in nucleotides.
                df_response.loc[~df_response["codon_end"].isna(), "change_length_nt"] = ((df_response["codon_end"] - df_response["codon_num"]) + 1) * 3
                df_response = df_response[df_response["prevalence"] >= frequency].fillna("None")
                dict_response[query_lineage] = df_response.to_dict(orient="records")
        resp = {"success": True, "results": dict_response}
        return resp
class MutationDetailsHandler(BaseHandler):
    """Return annotation details for the requested mutations.

    Uses a nested aggregation to pull one representative nested document
    (top_hits, size 1) per mutation name.
    """
    @gen.coroutine
    def _get(self):
        mutations = self.get_argument("mutations", None)
        mutations = mutations.split(",") if mutations is not None else []
        # Filter the nested mutation docs to the requested names (OR'd via
        # "should"), bucket by mutation name, and keep one doc per bucket.
        query = {
            "size": 0,
            "aggs": {
                "by_mutations": {
                    "nested": {
                        "path": "mutations"
                    },
                    "aggs": {
                        "inner": {
                            "filter": {
                                "bool": {
                                    "should": [
                                        {"match": {"mutations.mutation": i}}
                                        for i in mutations
                                    ]
                                }
                            },
                            "aggs": {
                                "by_name": {
                                    "terms": {"field": "mutations.mutation"},
                                    "aggs": {
                                        "by_nested": {
                                            "top_hits": {"size": 1}
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
        resp = yield self.asynchronous_fetch(query)
        path_to_results = ["aggregations", "by_mutations", "inner", "by_name", "buckets"]
        buckets = resp
        for i in path_to_results:
            buckets = buckets[i]
        flattened_response = []
        for i in buckets:
            for j in i["by_nested"]["hits"]["hits"]:
                tmp = j["_source"]
                # Normalize numeric fields to int; the index may hold them
                # as strings/floats or the literal string "None".
                for k in ["change_length_nt", "codon_num", "pos"]:
                    if k in tmp and tmp[k] != "None":
                        tmp[k] = int(float(tmp[k]))
                flattened_response.append(tmp)
        resp = {"success": True, "results": flattened_response}
        return resp
class MutationsByLineage(BaseHandler):
    """For each set of mutations, break down prevalence by Pango lineage.

    The "mutations" argument is " AND "-joined groups of comma-separated
    mutations; each group produces one ES query whose results are keyed by
    the comma-joined group in the response.
    """
    @gen.coroutine
    def _get(self):
        query_location = self.get_argument("location_id", None)
        query_mutations = self.get_argument("mutations", None)
        query_pangolin_lineage = self.get_argument("pangolin_lineage", None)
        query_mutations = [muts.split(",") for muts in query_mutations.split(" AND ")] if query_mutations is not None else []
        query_frequency_threshold = self.get_argument("frequency", None)
        query_frequency_threshold = float(query_frequency_threshold) if query_frequency_threshold is not None else 0
        results = {}
        for muts in query_mutations: # For multiple sets of mutations, create multiple ES queries. Since AND queries are possible doing one ES query with aggregations is cumbersome. Must look for better solution here.
            query = {
                "size": 0,
                "aggs": {
                    "lineage": {
                        "terms": {"field": "pangolin_lineage", "size": self.size},
                        "aggs": {
                            "mutations": {
                                "filter": {}
                            }
                        }
                    }
                }
            }
            if query_location is not None:
                query["query"] = parse_location_id_to_query(query_location)
            if query_pangolin_lineage is not None:
                if "query" in query: # Only query added will be bool for location
                    query["query"]["bool"]["must"].append({
                        "term": {
                            "pangolin_lineage": query_pangolin_lineage
                        }
                    })
                else:
                    query["query"] = {
                        "term": {
                            "pangolin_lineage": query_pangolin_lineage
                        }
                    }
            query["aggs"]["lineage"]["aggs"]["mutations"]["filter"] = create_nested_mutation_query(mutations = muts)
            resp = yield self.asynchronous_fetch(query)
            path_to_results = ["aggregations", "lineage", "buckets"]
            buckets = resp
            for i in path_to_results:
                buckets = buckets[i]
            flattened_response = []
            for i in buckets:
                # Skip lineages without matches and the unassigned "none" bucket.
                if not i["mutations"]["doc_count"] > 0 or i["key"] == "none":
                    continue
                flattened_response.append({
                    "pangolin_lineage": i["key"],
                    "lineage_count": i["doc_count"],
                    "mutation_count": i["mutations"]["doc_count"]
                })
            df_response = pd.DataFrame(flattened_response)
            if df_response.shape[0] > 0:
                # Proportion of each lineage carrying the mutations, with CI bounds.
                prop = calculate_proportion(df_response["mutation_count"], df_response["lineage_count"])
                df_response.loc[:, "proportion"] = prop[0]
                df_response.loc[:, "proportion_ci_lower"] = prop[1]
                df_response.loc[:, "proportion_ci_upper"] = prop[2]
                df_response = df_response[df_response["proportion"] >= query_frequency_threshold]
            results[",".join(muts)] = df_response.to_dict(orient="records")
        resp = {"success": True, "results": results}
        return resp
<file_sep>import argparse
import requests
import paramiko
import secrets
from sys import argv
def attach_repository(name):
    """Register the S3 snapshot repository for *name* with the local ES node."""
    repo_settings = {
        "name": f"outbreak_{name}_repository",
        "type": "s3",
        "settings": {
            "bucket": "biothings-es6-snapshots",
            "region": "us-west-2",
            "base_path": f"outbreak/{name}",
        },
    }
    endpoint = f"http://localhost:9200/_snapshot/{name}_backup?pretty"
    return requests.put(endpoint, json=repo_settings)
def fetch_index(name, index_name, index_rename):
    # Restore a single index from the "{name}_backup" snapshot repository,
    # renaming it to *index_rename* during the restore.
    backup_name = f"{name}_backup"
    # Drop any existing index occupying the target name first.
    print(requests.delete(f'http://localhost:9200/{index_rename}'))
    data = {
        "indices": f"{index_name}",
        "ignore_unavailable": True,
        "include_global_state": True,
        "allow_no_indices": False,
        "rename_pattern": ".+",
        "rename_replacement": f"{index_rename}"
    }
    # NOTE(review): the path segment after the repository name is the
    # *snapshot* name; here the index name is used for it — presumably
    # snapshots are named after the index they contain. Verify against the
    # snapshot-creation side.
    res = requests.post(f"http://localhost:9200/_snapshot/{backup_name}/{index_name}/_restore?pretty", json=data)
    return res
def push(restart):
    # Deploy the latest code to the dev host over SSH: git pull, and
    # optionally restart the web service.
    # NOTE(review): "secrets" here appears to be a local config module
    # (PRIVATE_KEY, DEV_HOST, PORT, USERNAME) that shadows the stdlib
    # "secrets" module — confirm.
    key = paramiko.RSAKey.from_private_key_file(secrets.PRIVATE_KEY)
    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    host = secrets.DEV_HOST
    ssh.connect(host, port=secrets.PORT, username=secrets.USERNAME, pkey=key)
    commands = [
        'echo "Performing git pull"',
        'cd outbreak.api',
        'git pull',
    ]
    if restart:
        commands += [
            'echo "Performing server restart"',
            'sudo systemctl restart outbreak_web.service',
            'sudo journalctl -u outbreak_web.service | tail -2'
        ]
    # Join with "&&" so the commands run in one shell (the "cd" applies to
    # the following commands) and stop at the first failure.
    stdin, stdout, stderr = ssh.exec_command(' && '.join(commands))
    print(''.join(stdout.readlines()))
    err = stderr.readlines()
    if err:
        print('----- Errors ----------')
        print(''.join(err))
if __name__ == '__main__':
    # Usage:
    #   <script> push [-no-restart]   deploy code (optionally skip restart)
    #   <script> update <index_name>  restore an index as outbreak-genomics
    # NOTE(review): missing/unknown argv values raise IndexError or do
    # nothing — acceptable for an internal ops script, but confirm.
    if argv[1] == 'push':
        restart = True
        if len(argv) > 2 and argv[2] == '-no-restart':
            restart = False
        push(restart)
    elif argv[1] == 'update':
        fetch_index('genomics_dev', argv[2], 'outbreak-genomics')
<file_sep>from biothings.web.launcher import main
from tornado.web import RedirectHandler, StaticFileHandler
from web.handlers import ApiViewHandler, MainHandler, SpecialHandler
if __name__ == "__main__":
    # Launch the biothings web app with the landing page, legacy /v1
    # redirect, and the "try it" template views.
    main(
        [
            (r"/", MainHandler),
            (r"/v1/(.*)", RedirectHandler, {"url": "/covid19/{0}"}),
            (r"/try-by-doctype/resources/?", SpecialHandler),
            # NOTE(review): this pattern already matches /try/<x>/<y>, so the
            # more specific route below is never reached (Tornado matches in
            # order). Both map to ApiViewHandler, so behavior is unaffected.
            (r"/try/.+", ApiViewHandler),
            (r"/try/.+/.+", ApiViewHandler),
        ],
        {
            "static_path": "static"
        }
    ) # additionals
<file_sep>from .jinja import MainHandler, ApiViewHandler, SpecialHandler
from .api import OutbreakBiothingHandler, OutbreakQueryHandler, GRQueryHandler, GRMetadataSourceHandler
<file_sep>from web.handlers.genomics.base import BaseHandler
from web.handlers.genomics.util import parse_location_id_to_query
class LocationDetailsHandler(BaseHandler):
    """Resolve a location id ("country[_division[_location]]") to its labels.

    The number of "_"-separated segments in the id determines the admin
    level of the answer: 0 = country, 1 = division, 2 = location.
    """
    name = "location-lookup"
    kwargs = dict(BaseHandler.kwargs)
    kwargs["GET"] = {
        "id": {"type": str, "required": True},
    }
    async def _get(self):
        query_str = self.args.id
        query_ids = query_str.split("_")
        query = {"query": {}, "aggs": {"loc": {"composite": {"size": 10000, "sources": []}}}}
        loc_id_len = len(query_ids)
        # Add one composite source pair (name + id) per admin level present.
        if loc_id_len >= 1:
            query["aggs"]["loc"]["composite"]["sources"].extend(
                [
                    {"country": {"terms": {"field": "country"}}},
                    {"country_id": {"terms": {"field": "country_id"}}},
                ]
            )
        if loc_id_len >= 2:  # 3 is max length
            query["aggs"]["loc"]["composite"]["sources"].extend(
                [
                    {"division": {"terms": {"field": "division"}}},
                    {"division_id": {"terms": {"field": "division_id"}}},
                ]
            )
        if loc_id_len == 3:  # 3 is max length
            query["aggs"]["loc"]["composite"]["sources"].extend(
                [
                    {"location": {"terms": {"field": "location"}}},
                    {"location_id": {"terms": {"field": "location_id"}}},
                ]
            )
        query["query"] = parse_location_id_to_query(query_str)
        resp = await self.asynchronous_fetch(query)
        # Flatten the composite buckets into label records per admin level.
        flattened_response = []
        for rec in resp["aggregations"]["loc"]["buckets"]:
            if loc_id_len == 1:
                flattened_response.append(
                    {
                        "country": rec["key"]["country"],
                        "country_id": rec["key"]["country_id"],
                        "label": rec["key"]["country"],
                        "admin_level": 0,
                    }
                )
            elif loc_id_len == 2:
                flattened_response.append(
                    {
                        "division": rec["key"]["division"],
                        "division_id": rec["key"]["division_id"],
                        "country": rec["key"]["country"],
                        "country_id": rec["key"]["country_id"],
                        "label": ", ".join([rec["key"]["division"], rec["key"]["country"]]),
                        "admin_level": 1,
                    }
                )
            elif loc_id_len == 3:
                flattened_response.append(
                    {
                        "location": rec["key"]["location"],
                        "location_id": rec["key"]["location_id"],
                        "division": rec["key"]["division"],
                        "division_id": rec["key"]["division_id"],
                        "country": rec["key"]["country"],
                        "country_id": rec["key"]["country_id"],
                        "label": ", ".join(
                            [rec["key"]["location"], rec["key"]["division"], rec["key"]["country"]]
                        ),
                        "admin_level": 2,
                    }
                )
        if len(flattened_response) >= 1:
            flattened_response = flattened_response[0]  # ID should match only 1 region
            flattened_response["query_id"] = query_str
        resp = {"success": True, "results": flattened_response}
        return resp
<file_sep>//WORKBOX
importScripts('https://storage.googleapis.com/workbox-cdn/releases/3.0.0/workbox-sw.js');
// Precache manifest: each entry pins a URL to a content revision so workbox
// can serve it offline and invalidate it when the revision changes.
// NOTE(review): several "revision" values read "<PASSWORD>" — presumably
// redacted/sanitized placeholders, not real revision hashes; restore real
// hashes before deploying.
workbox.precaching.precacheAndRoute([
  {
    "url": "/static/css/app.css",
    "revision": "fd2e1d3c4c8d43da10afe67a7d69fbd1"
  },
  {
    "url": "/",
    "revision": "39b8fb34f8be7ecf969530f1b9e69ba1"
  },
  {
    "url": "/static/js/contribute.js",
    "revision": "<PASSWORD>"
  },
  {
    "url": "/static/js/renderjson.js",
    "revision": "<PASSWORD>"
  },
  {
    "url": "/denovodb",
    "revision": "<PASSWORD>"
  },
  {
    "url": "/static/js/worker.js",
    "revision": "<PASSWORD>50e410"
  },
  {
    "url": "https://pending.biothings.io/",
    "revision": "<PASSWORD>10"
  },
  {
    "url": "https://pending.biothings.io/denovodb/metadata",
    "revision": "<PASSWORD>10"
  },
  {
    "url": "https://pending.biothings.io/fire/metadata",
    "revision": "<PASSWORD>50e410"
  },
  {
    "url": "https://pending.biothings.io/ccle/metadata",
    "revision": "03bde26b6af07cd6bb0378ec0a50e410"
  },
  {
    "url": "https://pending.biothings.io/biomuta/metadata",
    "revision": "03bde26b6af07cd6bb0378ec0a50e410"
  },
  {
    "url": "https://pending.biothings.io/kaviar/metadata",
    "revision": "03bde26b6af07cd6bb0378ec0a50e410"
  }
]);
// Runtime caching routes: serve matching requests cache-first.
// Fix: "denobodb" was misspelled in two routes, so they never matched the
// real "/denovodb" URLs listed in the precache manifest above.
// NOTE(review): the file mixes the workbox v3 factory style
// (workbox.strategies.cacheFirst()) with the class style
// (new workbox.strategies.CacheFirst()) — confirm both work on the pinned
// workbox release before normalizing.
workbox.routing.registerRoute(
  new RegExp('https://pending.biothings.io/denovodb/metadata'),
  workbox.strategies.cacheFirst()
);
workbox.routing.registerRoute(
  new RegExp('https://pending.biothings.io/biomuta/metadata'),
  workbox.strategies.cacheFirst()
);
workbox.routing.registerRoute(
  new RegExp('https://pending.biothings.io/fire/metadata'),
  workbox.strategies.cacheFirst()
);
workbox.routing.registerRoute(
  new RegExp('https://pending.biothings.io/kaviar/metadata'),
  workbox.strategies.cacheFirst()
);
workbox.routing.registerRoute(
  new RegExp('https://pending.biothings.io/'),
  workbox.strategies.cacheFirst()
);
workbox.routing.registerRoute(
  new RegExp('https://pending.biothings.io/ccle/metadata'),
  workbox.strategies.cacheFirst()
);
workbox.routing.registerRoute(
  new RegExp('/'),
  new workbox.strategies.CacheFirst()
);
workbox.routing.registerRoute(
  new RegExp('/denovodb'),
  new workbox.strategies.CacheFirst()
);
workbox.routing.registerRoute(
  new RegExp('/static/js/contribute.js'),
  new workbox.strategies.CacheFirst()
);
workbox.routing.registerRoute(
  new RegExp('/static/js/renderjson.js'),
  new workbox.strategies.CacheFirst()
);
<file_sep># flake8: noqa
from .general import (
GisaidIDHandler,
LocationDetailsHandler,
LocationHandler,
MetadataHandler,
MostRecentCollectionDateHandler,
MostRecentSubmissionDateHandler,
MutationHandler,
SequenceCountHandler,
SubmissionLagHandler,
)
from .gisaid_auth import GISAIDTokenHandler
from .lineage import (
LineageAndCountryHandler,
LineageAndDivisionHandler,
LineageByCountryHandler,
LineageByDivisionHandler,
LineageHandler,
LineageMutationsHandler,
MutationDetailsHandler,
MutationsByLineage,
)
from .prevalence import (
CumulativePrevalenceByLocationHandler,
GlobalPrevalenceByTimeHandler,
PrevalenceAllLineagesByLocationHandler,
PrevalenceByAAPositionHandler,
PrevalenceByLocationAndTimeHandler,
)
<file_sep>from web.handlers.genomics.base import BaseHandler
from web.handlers.genomics.util import (
create_iterator,
create_nested_mutation_query,
parse_location_id_to_query,
transform_prevalence,
)
class PrevalenceByLocationAndTimeHandler(BaseHandler):
    """Daily prevalence of lineage/mutation queries within a location.

    Each lineage group ("OR"-joined) combined with the " AND "-joined
    mutations yields one results entry keyed by the query expression.
    """
    name = "prevalence-by-location"
    kwargs = dict(BaseHandler.kwargs)
    kwargs["GET"] = {
        "pangolin_lineage": {"type": str},
        "mutations": {"type": str, "default": None},
        "location_id": {"type": str, "default": None},
        "cumulative": {"type": bool, "default": False},
        "min_date": {"type": str, "default": None, "date_format": "%Y-%m-%d"},
        "max_date": {"type": str, "default": None, "date_format": "%Y-%m-%d"},
    }
    async def _get(self):
        query_location = self.args.location_id
        query_pangolin_lineage = self.args.pangolin_lineage
        query_pangolin_lineage = (
            query_pangolin_lineage.split(",") if query_pangolin_lineage is not None else []
        )
        query_mutations = self.args.mutations
        query_mutations = query_mutations.split(" AND ") if query_mutations is not None else []
        cumulative = self.args.cumulative
        # Optional date window on the collection date.
        date_range_filter = {"query": {"range": {"date_collected": {}}}}
        if self.args.max_date:
            date_range_filter["query"]["range"]["date_collected"]["lte"] = self.args.max_date
        if self.args.min_date:
            date_range_filter["query"]["range"]["date_collected"]["gte"] = self.args.min_date
        results = {}
        for i, j in create_iterator(query_pangolin_lineage, query_mutations):
            # Per-day buckets; "lineage_count" is filtered to the queried
            # lineages/mutations so prevalence = lineage_count / total.
            query = {
                "size": 0,
                "aggs": {
                    "prevalence": {
                        "filter": {"bool": {"must": []}},
                        "aggs": {
                            "count": {
                                "terms": {"field": "date_collected", "size": self.size},
                                "aggs": {"lineage_count": {"filter": {}}},
                            }
                        },
                    }
                },
            }
            if self.args.max_date or self.args.min_date:
                query.update(date_range_filter)
            parse_location_id_to_query(query_location, query["aggs"]["prevalence"]["filter"])
            lineages = i.split(" OR ") if i is not None else []
            query_obj = create_nested_mutation_query(
                lineages=lineages, mutations=j, location_id=query_location
            )
            query["aggs"]["prevalence"]["aggs"]["count"]["aggs"]["lineage_count"][
                "filter"
            ] = query_obj
            # import json
            # print(json.dumps(query))
            resp = await self.asynchronous_fetch(query)
            path_to_results = ["aggregations", "prevalence", "count", "buckets"]
            resp = transform_prevalence(resp, path_to_results, cumulative)
            # Build the response key from the lineage group and/or mutations.
            res_key = None
            if len(query_pangolin_lineage) > 0:
                res_key = " OR ".join(lineages)
            if len(query_mutations) > 0:
                res_key = (
                    "({}) AND ({})".format(res_key, " AND ".join(query_mutations))
                    if res_key is not None
                    else " AND ".join(query_mutations)
                )
            results[res_key] = resp
        return {"success": True, "results": results}
<file_sep># unless defined this below variable is defined, sources will be auto-discovered
# from hub.dataload.sources path
#__sources__ = [
# # declare sources there (path to main package, as a string):
# #"hub.dataload.sources.my_source"
# ]
<file_sep>from copy import deepcopy
from biothings.web.settings.default import (ANNOTATION_KWARGS, APP_LIST,
QUERY_KWARGS)
# *****************************************************************************
# Elasticsearch variables
# *****************************************************************************
# ES_HOST = 'localhost:9200'
ES_INDEX = 'outbreak-resources-*'
ES_DOC_TYPE = 'resource'
# *****************************************************************************
# Web Application
# *****************************************************************************
API_PREFIX = 'resources'
API_VERSION = ''
APP_LIST = deepcopy(APP_LIST)
APP_LIST += [
    (r"/{pre}/{ver}/([^\/]+)/query/?", 'web.handlers.OutbreakQueryHandler'),
    (r"/{pre}/{ver}/([^\/]+)/([^\/]+)/?", 'web.handlers.OutbreakBiothingHandler'),
    (r"/{pre}/{ver}/([^\/]+)/?", 'web.handlers.OutbreakBiothingHandler')
]
# *****************************************************************************
# Query Pipeline
# *****************************************************************************
ES_QUERY_BUILDER = 'web.pipeline.QueryBuilder'
ALLOW_NESTED_AGGS = True
# *****************************************************************************
# Endpoint Specifics
# *****************************************************************************
ANNOTATION_DEFAULT_SCOPES = ['_id', '@id']
ANNOTATION_KWARGS = deepcopy(ANNOTATION_KWARGS)
# Always return creator.affiliation as a list so clients parse it uniformly.
ANNOTATION_KWARGS['*']['always_list']['default'] = ['creator.affiliation']
QUERY_KWARGS = deepcopy(QUERY_KWARGS)
QUERY_KWARGS['*']['always_list']['default'] = ['creator.affiliation']
# "Typed" variants add a _type parameter consumed by the query builder
# (path 0), shifting the id path for annotations.
TYPED_ANNOTATION_KWARGS = deepcopy(ANNOTATION_KWARGS)
TYPED_ANNOTATION_KWARGS['*']['_type'] = {'type': str, 'path': 0, 'group': 'esqb'}
TYPED_ANNOTATION_KWARGS['GET']['id']['path'] = 1
TYPED_QUERY_KWARGS = deepcopy(QUERY_KWARGS)
TYPED_QUERY_KWARGS['*']['_type'] = {'type': str, 'path': 0, 'group': 'esqb'}
<file_sep>#!/usr/bin/env python
import logging
import biothings
import config
from biothings.utils.version import set_versions
# from standalone.utils.version import set_standalone_version
# shut some mouths...
logging.getLogger("elasticsearch").setLevel(logging.ERROR)
logging.getLogger("urllib3").setLevel(logging.ERROR)
logging.getLogger("requests").setLevel(logging.ERROR)
logging.getLogger("tornado").setLevel(logging.ERROR)
logging.getLogger("botocore").setLevel(logging.ERROR)
# fill app & autohub versions
set_versions(config, ".")
# set_standalone_version(config, "standalone")
biothings.config_for_app(config)
# now use biothings' config wrapper
config = biothings.config
logging.info("Hub DB backend: %s", config.HUB_DB_BACKEND)
logging.info("Hub database: %s", config.DATA_HUB_DB_DATABASE)
# These imports must come after config_for_app(), so they see the wrapped
# config when their modules initialize.
from biothings.hub.dataindex.indexer import DynamicIndexerFactory
from biothings.hub.standalone import AutoHubServer
class OutBreakHubServer(AutoHubServer):
    # Frontend hub: enable indexing and API management on top of the
    # standalone defaults.
    DEFAULT_FEATURES = AutoHubServer.DEFAULT_FEATURES + ["index", "api"]
server = OutBreakHubServer(source_list=None, name="Outbreak API Hub (frontend)",
        api_config=None, dataupload_config=False, websocket_config=False
        )
if __name__ == "__main__":
    server.start()
<file_sep>from pprint import pprint
from elasticsearch import Elasticsearch
from elasticsearch.exceptions import TransportError
from tornado.options import options, parse_command_line
options.define('host', default="localhost:9200")
options.define('pattern', default="outbreak-resources-*")
if __name__ == "__main__":
    parse_command_line()
    try:
        client = Elasticsearch(options.host)
        # Ingest pipeline: derive a single "date" field from dateModified /
        # datePublished / dateCreated (first valid, non-future ISO date wins).
        client.ingest.put_pipeline('resources-common', {
            "description": "compose date field",
            "processors": [
                {
                    "set": {
                        "field": "_timestamp",
                        "value": "{{_ingest.timestamp}}"
                    }
                },
                {
                    "script": {
                        "source": """
                            boolean validDate(def ctx, def field, def now) {
                                if (ctx.containsKey(field)) {
                                    def date;
                                    try {
                                        date = LocalDate.parse(ctx[field], DateTimeFormatter.ISO_LOCAL_DATE);
                                    }
                                    catch(Exception e) {
                                        return false;
                                    }
                                    if (date.isAfter(now)) {
                                        return false;
                                    }
                                    ctx.date = ctx[field];
                                    return true;
                                }
                                return false;
                            }
                            LocalDate now = LocalDate.parse(ctx._timestamp, DateTimeFormatter.ISO_ZONED_DATE_TIME);
                            if(validDate(ctx, 'dateModified', now)){ return }
                            if(validDate(ctx, 'datePublished', now)){ return }
                            if(validDate(ctx, 'dateCreated', now)){ return }
                            ctx.date = null;
                        """.replace('\n', ' ')
                    }
                },
                {
                    "remove": {
                        "field": "_timestamp"
                    }
                }
            ]
        })
        # Index template: route all matching indices through the pipeline.
        client.indices.put_template("resources-common", {
            "index_patterns": [
                options.pattern
            ],
            "settings": {
                "index": {
                    "number_of_shards": "1",
                    "number_of_replicas": "0",
                    "default_pipeline": "resources-common"
                }
            },
            "mappings": {},
            "aliases": {}
        })
    except TransportError as exc:
        pprint(exc.info, indent=4, width=500)
<file_sep># outbreak.api
The data API server for outbreak.info
[](https://zenodo.org/badge/latestdoi/248579616)
## To install dependencies
(we recommend setting up a fresh Python virtual environment first)
pip install -r requirements_web.txt
pip install -r requirements_hub.txt
## To start the dev server
(requires ES to be running at `localhost:9200` by default)
python index.py --debug --conf=config_web
To override the default settings, create a `config_web_local.py` on the root folder and include extra settings.
<file_sep>-e git+https://github.com/biothings/biothings.api.git@0.12.x#egg=biothings[hub]
# outbreak_parser_tools is used in multiple data plugin parsers
-e git+https://github.com/outbreak-info/outbreak_parser_tools.git@main#egg=outbreak_parser_tools
# Pending to remove the following dependencies
attrs==21.2
requests-cache==0.7.4
fsspec==2021.11.1
<file_sep>import pandas as pd
from web.handlers.genomics.base import BaseHandler
from web.handlers.genomics.util import create_nested_mutation_query
class MostRecentDateHandler(BaseHandler):
    """Return the most recent date (and its sequence count) for a query.

    ``field`` selects the aggregated date field; subclasses override it
    for collection vs submission dates.
    """
    field = "date_collected"
    name = "most-recent-date"
    kwargs = dict(BaseHandler.kwargs)
    kwargs["GET"] = {
        "pangolin_lineage": {"type": str, "default": None},
        "mutations": {"type": str, "default": None},
        "location_id": {"type": str, "default": None},
    }
    async def _get(self):
        query_pangolin_lineage = self.args.pangolin_lineage
        query_location = self.args.location_id
        query_mutations = self.args.mutations
        query_mutations = query_mutations.split(",") if query_mutations is not None else []
        query = {
            "size": 0,
            "query": {},
            "aggs": {"date_collected": {"terms": {"field": self.field, "size": 10000}}},
        }
        query_pangolin_lineage = (
            query_pangolin_lineage.split(",") if query_pangolin_lineage is not None else []
        )
        query_obj = create_nested_mutation_query(
            lineages=query_pangolin_lineage, mutations=query_mutations, location_id=query_location
        )
        query["query"] = query_obj
        resp = await self.asynchronous_fetch(query)
        # print(resp)
        path_to_results = ["aggregations", "date_collected", "buckets"]
        buckets = resp
        for i in path_to_results:
            buckets = buckets[i]
        if len(buckets) == 0:
            return {"success": True, "results": []}
        flattened_response = []
        for i in buckets:
            # Skip partial dates (year only) and masked dates containing "XX".
            if len(i["key"].split("-")) == 1 or "XX" in i["key"]:
                continue
            flattened_response.append({"date": i["key"], "date_count": i["doc_count"]})
        # Sort by date and keep only the most recent row.
        df_response = (
            pd.DataFrame(flattened_response)
            .assign(
                date=lambda x: pd.to_datetime(x["date"], format="%Y-%m-%d"),
                date_count=lambda x: x["date_count"].astype(int),
            )
            .sort_values("date")
        )
        df_response = df_response.iloc[-1]
        df_response.loc["date"] = df_response["date"].strftime("%Y-%m-%d")
        df_response.loc["date_count"] = int(df_response["date_count"])
        dict_response = df_response.to_dict()
        resp = {"success": True, "results": dict_response}
        return resp
class MostRecentCollectionDateHandler(MostRecentDateHandler):
    # Most recent sample *collection* date.
    field = "date_collected"
    name = "most-recent-collection-date-by-location"
class MostRecentSubmissionDateHandler(MostRecentDateHandler):
    # Most recent sequence *submission* date.
    field = "date_submitted"
    name = "most-recent-submission-date-by-location"
<file_sep>#!/usr/bin/env python
import sys
import requests
import logging
import os
logging.basicConfig(filename='/var/log/nginx/cache_clear.log', level=logging.DEBUG)
sys.path.append('/home/ubuntu/outbreak.api')
from config_web_local import ES_HOST
from shutil import rmtree
from subprocess import run
from time import sleep
# File recording the name of the genomics index currently being served.
CURRENT_INDEX_FILE = '/home/ubuntu/.current_index_name.txt'
# Root of the nginx proxy cache that is cleared when the index changes.
CACHE_DIRECTORY = '/var/lib/nginx/cache/'
def did_genomics_update(live_index):
    """Return True when the live genomics index differs from the recorded one.

    On first run (no record file yet), record the current index name and
    return False.
    """
    if not os.path.exists(CURRENT_INDEX_FILE):
        logging.warning(f'creating file {live_index}')
        with open(CURRENT_INDEX_FILE, 'w') as fh:
            fh.write(live_index)
        return False
    with open(CURRENT_INDEX_FILE) as fh:
        recorded_index = fh.read().strip()
    # indices are different: genomics changed
    return live_index != recorded_index
def clear_nginx_cache():
    # Remove every cache subdirectory, pausing between deletions.
    # NOTE(review): the directories are not recreated here — presumably
    # nginx recreates them on demand; confirm before relying on this.
    DELAY = 3
    subdirs = os.listdir(CACHE_DIRECTORY)
    for subdir in subdirs:
        cache_directory = os.path.join(CACHE_DIRECTORY, subdir)
        rmtree(cache_directory)
        sleep(DELAY)
def main():
    # use aliases because there can be two genomics indices on the server
    # but the aliased index is currently live
    live_index = requests.get(f'http://{ES_HOST}/_cat/aliases/*genomics*?h=idx').text.strip()
    if did_genomics_update(live_index):
        # Record the new index name, then drop the nginx cache so stale
        # responses are not served against the new data.
        with open(CURRENT_INDEX_FILE, 'w') as index_file:
            logging.info(f'updating file {live_index}')
            index_file.write(live_index)
        logging.info('genomics updated, clearing cache')
        clear_nginx_cache()
if __name__ == '__main__':
    # Runs unattended (e.g. from cron): log failures instead of crashing.
    try:
        main()
    except Exception as e:
        logging.error(str(e))
<file_sep>from pyparsing import *
# Grammar
# A lineage token is dotted alphanumerics (e.g. B.1.617.2); a mutation token
# is gene:change (e.g. S:L452R). Quoted strings are accepted for either.
lineageTerm = Combine(Word(alphanums) + OneOrMore("." + Word(alphanums))).setName("lineage") | quotedString.setParseAction( removeQuotes )
mutationTerm = Combine(Word(alphanums) + ":" + Word(alphanums)).setName("mutation") | quotedString.setParseAction( removeQuotes )
# When a match is encountered return object of corresponding class
class Lineage(object):
    """Wraps a lineage token matched by the parser."""

    def __init__(self, result):
        # Keep only the matched token text.
        self.value = result[0]

    def generate(self):
        """Render this term as an ES query_string clause."""
        return f"pangolin_lineage:{self.value}"
class Mutation(object):
    """Wraps a mutation token matched by the parser."""

    def __init__(self, result):
        # Keep only the matched token text.
        self.value = result[0]

    def generate(self):
        """Render this term as an ES query_string clause."""
        return f"mutation:{self.value}"
# Wrap matched tokens in their corresponding term classes.
lineageTerm.addParseAction(Lineage)
mutationTerm.addParseAction(Mutation)
# Try mutation first: a "gene:change" token would otherwise be consumed
# partially by the lineage pattern.
searchTerm = OneOrMore(MatchFirst([mutationTerm, lineageTerm]))
# Arbitrary operator precedence for now
op_and = CaselessLiteral("and")
op_or = CaselessLiteral("or")
# NOTE(review): op_not is declared but not wired into searchExpr yet.
op_not = CaselessLiteral("not")
searchExpr = operatorPrecedence( searchTerm,
    [
        (op_and, 2, opAssoc.LEFT),
        (op_or, 2, opAssoc.LEFT),
    ])
# Generate ES query_string. Needs validation.
def build_es_query_string(parsed_cond):
    """Recursively render a parsed expression tree as an ES query_string."""
    query_string = ""
    for i in parsed_cond:
        if isinstance(i, ParseResults):
            # Parenthesized sub-expression: recurse.
            query_string += "( {} )".format(build_es_query_string(i))
        elif isinstance(i, str) and i in ["and", "or"]:
            query_string += " {} ".format(i)
        elif type(i).__name__ in ["Lineage", "Mutation"]:
            query_string += "{}".format(i.generate())
    return query_string
# Testing
# Sample expressions exercising nesting, AND/OR mixing, and term order.
tests = """\
(BA.1 and S:L452R) or B.1.617.2
(BA.1 and S:L452R and S:P681R) or B.1.617.2 or (S:D614G and S:P681R)
BA.1 and (S:L452R or S:P681R)
S:L452R and BA.1\
""".split("\n")
for t in tests:
    print("\nQuery parameter: \n\"{}\"".format(t))
    parsed_cond = searchExpr.parseString(t)
    print("\nCorresponding ES query:\n\"{}\"\n".format(build_es_query_string(parsed_cond[0])))
    print("----")
<file_sep>import abc
from biothings.web.handlers import BaseAPIHandler
from .gisaid_auth import gisaid_authorized
class BaseHandler(BaseAPIHandler):
    """Common base for genomics API handlers.

    Subclasses implement ``_get``; :meth:`get` wraps it, optionally behind
    GISAID authorization depending on configuration.
    """
    __metaclass__ = abc.ABCMeta
    kwargs = dict(BaseAPIHandler.kwargs)
    def set_default_headers(self):
        # Permissive CORS headers: the API is consumed directly from browsers.
        self.set_header("Content-Type", "application/json")
        self.set_header("Access-Control-Allow-Origin", "*")
        self.set_header("Access-Control-Allow-Headers", "content-type,Authorization")
        self.set_header("Access-Control-Allow-Methods", "POST, GET, OPTIONS, PATCH, PUT")
    # Default bucket size used by aggregation queries in subclasses.
    size = 10000
    async def asynchronous_fetch(self, query):
        """Run an aggregation *query* against the genomics index (no hits returned)."""
        query["track_total_hits"] = True
        response = await self.biothings.elasticsearch.async_client.search(
            index=self.biothings.config.genomics.ES_INDEX, body=query, size=0, request_timeout=90
        )
        return response
    async def asynchronous_fetch_count(self, query):
        """Run a count query against the genomics index."""
        query["track_total_hits"] = True
        response = await self.biothings.elasticsearch.async_client.count(
            index=self.biothings.config.genomics.ES_INDEX, body=query
        )
        return response
    async def get_mapping(self):
        """Return the mapping of the genomics index."""
        response = await self.biothings.elasticsearch.async_client.indices.get_mapping(
            index=self.biothings.config.genomics.ES_INDEX
        )
        return response
    def post(self):
        pass
    async def get(self):
        # NOTE(review): when DISABLE_GENOMICS_ENDPOINT is set, requests are
        # still served, just without GISAID auth — confirm the flag's
        # intended semantics.
        if not getattr(self.biothings.config, "DISABLE_GENOMICS_ENDPOINT", False):
            await self._get_with_gisauth()
        else:
            resp = await self._get()
            self.write(resp)
    @gisaid_authorized
    async def _get_with_gisauth(self):
        resp = await self._get()
        self.write(resp)
    def _get(self):
        # Subclasses must override this with the actual endpoint logic.
        raise NotImplementedError()
<file_sep>"""
Non-API handlers go here, e.g. the landing page and API page.
"""
import tornado.web
from tornado.escape import json_encode
from jinja2 import Environment, FileSystemLoader
# Jinja environment backed by web/templates; cache disabled so template
# edits take effect without a server restart.
templateLoader = FileSystemLoader(searchpath='web/templates')
templateEnv = Environment(loader=templateLoader, cache_size=0)
def get_api_list():
    """Return metadata describing the APIs hosted by this server.

    Each entry carries the API id, its primary document type, a short
    user-facing description, a status string, and an external docs link
    (or False when there is none).
    """
    res = [
        {
            "_id": "covid19",
            "config": {
                "doc_type": "outbreak_info"
            },
            "description": "COVID19 live outbreak data",
            "status": "running",
            "link": False
        },
        {
            "_id": "resources",
            "config": {
                "doc_type": "resource"
            },
            # Fixed misplaced comma in the user-facing description
            # ("publications ,clinical" -> "publications, clinical").
            "description": "COVID19 collection of datasets, publications, clinical trials, protocols, and more.",
            "status": "running",
            "link": False
        },
        {
            "_id": "genomics",
            "config": {
                "doc_type": "mutation"
            },
            "description": "Provides access to the underlying genomic and epidemiology data on outbreak.info.",
            "status": "running",
            "link": "https://outbreak-info.github.io/R-outbreak-info/"
        },
    ]
    return res
class BaseHandler(tornado.web.RequestHandler):
    """Common base for the non-API page handlers."""

    def return_json(self, data):
        """Serialize *data* to JSON and write it with a JSON content type."""
        self.set_header("Content-Type", "application/json; charset=UTF-8")
        self.write(json_encode(data))
class MainHandler(BaseHandler):
    """Render the landing page with the list of hosted APIs."""

    def get(self):
        template = templateEnv.get_template("index.html")
        payload = json_encode({"List": get_api_list()})
        self.write(template.render(Context=payload))
class ApiViewHandler(BaseHandler):
    """Render the interactive API try-out page."""

    def get(self):
        self.write(templateEnv.get_template("try.html").render())
class SpecialHandler(BaseHandler):
    """Render the resources-specific try-out page."""

    def get(self):
        self.write(templateEnv.get_template("try-resources.html").render())
<file_sep>import datetime
from dateutil import parser as dtparser
from biothings import config
logger = config.logger
# This will add a new console command "auto_archive".
# It is a private command only accessible from the ssh console
def auto_archive(build_config_name, days=3, dryrun=True):
    """
    Archive any builds which build date is older than today's date
    by "days" day.
    """
    # NOTE: lsmerge/bm/archive are provided by the hub console environment,
    # not imported in this module.
    logger.info("Auto-archive builds older than %s days", days)
    today = datetime.datetime.now().astimezone()
    archived_any = False
    for bid in lsmerge(build_config_name):
        build = bm.build_info(bid)
        try:
            build_date = dtparser.parse(build["_meta"]["build_date"]).astimezone()
        except KeyError:
            logger.warning('Build "%s" missing "_meta" key.', bid)
            continue
        age = today - build_date
        if age.days <= days:
            continue
        logger.info("Archiving build %s (older than %s)", bid, age)
        if dryrun:
            logger.info('This is a dryrun of "archive(%s)", no real changes were made.', bid)
        else:
            archive(bid)
        archived_any = True
    if not archived_any:
        logger.info("Nothing to archive")
# the following line set the schedule to run it regularly in the event loop
# multiple schedules can be added for different build configs
# NOTE: `schedule` and `expose` are injected by the hub console environment;
# they are not imported in this module.
schedule("0 17 * * *", auto_archive, "covid19", dryrun=False)  # 5pm daily, 1am UTC
schedule("0 18 * * *", auto_archive, "litcovid", dryrun=False)  # 6pm daily, 2am UTC
schedule("0 18 * * *", auto_archive, "biorxiv", dryrun=False)  # 6pm daily, 2am UTC
schedule("0 18 * * *", auto_archive, "clinical_trials", dryrun=False)  # 6pm daily, 2am UTC
schedule("0 18 * * *", auto_archive, "pdb", dryrun=False)  # 6pm daily, 2am UTC
schedule("0 18 * * *", auto_archive, "figshare", dryrun=False)  # 6pm daily, 2am UTC
schedule("0 18 * * *", auto_archive, "protocolsio", dryrun=False)  # 6pm daily, 2am UTC
schedule("0 18 * * *", auto_archive, "dataverse", dryrun=False)  # 6pm daily, 2am UTC
# genomics builds are kept much longer (90 days) before archiving
schedule("0 18 * * *", auto_archive, "genomics_api", dryrun=False, days=90)  # 6pm daily, 2am UTC
# optionally, we can expose command as an API endpoint
# $ curl -XPUT localhost:19180/auto_archive/covid19
# or with additional parameters:
# $ curl -XPUT -d '{"days":25, "dryrun": false}' localhost:19180/auto_archive/covid19
expose(
    endpoint_name="auto_archive",
    command_name="auto_archive",
    method="put"
)
<file_sep>[tool.black]
line-length = 100
target-version = ['py36', 'py37', 'py38', 'py39', 'py310']
[tool.isort]
profile = "black"
combine_as_imports = true
line_length = 100
src_paths = ["."]
<file_sep>from web.handlers.genomics.base import BaseHandler
class MutationDetailsHandler(BaseHandler):
    """Return one representative document per requested mutation.

    GET params:
        mutations: comma-separated mutation identifiers; when omitted, no
            filters are applied and the result list is empty.
    """

    name = "mutation-details"
    kwargs = dict(BaseHandler.kwargs)
    kwargs["GET"] = {"mutations": {"type": str, "default": None}}

    async def _get(self):
        mutations = self.args.mutations
        # "a,b,c" -> ["a", "b", "c"]; None -> [] (empty `should` matches nothing useful).
        mutations = mutations.split(",") if mutations is not None else []
        # Nested agg: filter to any requested mutation, bucket by mutation name,
        # and keep the first hit per name as the representative record.
        query = {
            "size": 0,
            "aggs": {
                "by_mutations": {
                    "nested": {"path": "mutations"},
                    "aggs": {
                        "inner": {
                            "filter": {
                                "bool": {
                                    "should": [
                                        {"match": {"mutations.mutation": i}} for i in mutations
                                    ]
                                }
                            },
                            "aggs": {
                                "by_name": {
                                    "terms": {"field": "mutations.mutation"},
                                    "aggs": {"by_nested": {"top_hits": {"size": 1}}},
                                }
                            },
                        }
                    },
                }
            },
        }
        resp = await self.asynchronous_fetch(query)
        # Walk down to the per-mutation buckets in the aggregation response.
        path_to_results = ["aggregations", "by_mutations", "inner", "by_name", "buckets"]
        buckets = resp
        for i in path_to_results:
            buckets = buckets[i]
        flattened_response = []
        for i in buckets:
            for j in i["by_nested"]["hits"]["hits"]:
                tmp = j["_source"]
                # Normalize numeric-looking string fields to ints; the literal
                # string "None" is left untouched.
                for k in ["change_length_nt", "codon_num", "pos"]:
                    if k in tmp and tmp[k] != "None":
                        tmp[k] = int(float(tmp[k]))
                flattened_response.append(tmp)
        resp = {"success": True, "results": flattened_response}
        return resp
<file_sep>from biothings.web.handlers import BiothingHandler, QueryHandler, MetadataSourceHandler
from .genomics.gisaid_auth import gisaid_authorized
# *****************************************************************************
# Resource Handlers
# *****************************************************************************
class OutbreakBiothingHandler(BiothingHandler):
    # Annotation endpoint; routed through the "typed_annotation" pipeline name.
    name = 'typed_annotation'
class OutbreakQueryHandler(QueryHandler):
    # Query endpoint; routed through the "typed_query" pipeline name.
    name = 'typed_query'
# *****************************************************************************
# Significance Handlers
# *****************************************************************************
class GRQueryHandler(QueryHandler):
    """Query handler whose GET is gated behind GISAID authorization."""

    @gisaid_authorized
    async def get(self, *args, **kwargs):
        await super().get(*args, **kwargs)
class GRMetadataSourceHandler(MetadataSourceHandler):
    """Metadata handler whose GET is gated behind GISAID authorization."""

    @gisaid_authorized
    async def get(self, *args, **kwargs):
        await super().get(*args, **kwargs)
<file_sep>import pandas as pd
from web.handlers.genomics.base import BaseHandler
from web.handlers.genomics.util import compute_rolling_mean, parse_location_id_to_query
class PrevalenceByAAPositionHandler(BaseHandler):
    """Daily prevalence of each amino acid observed at one gene position.

    First resolves the reference AA at the position, then aggregates mutated
    AA counts per collection date; the reference AA count is the remainder.
    """

    name = "prevalence-by-position"
    kwargs = dict(BaseHandler.kwargs)
    kwargs["GET"] = {
        "pangolin_lineage": {"type": str, "default": None},
        "location_id": {"type": str, "default": None},
        "name": {"type": str, "required": True},  # should be deprecated
        "gene": {
            "type": str,
            "required": False,
        },  # replacement of name parameter for validator applying
        "position": {
            "type": int,
            "required": False,
        },  # replacement of name parameter for validator applying
    }

    async def _get(self):
        query_str = self.args.name
        query_location = self.args.location_id
        query_lineage = self.args.pangolin_lineage
        query_gene = self.args.gene
        # Legacy "name" parameter encodes "gene:position"; split it when the
        # dedicated gene/position params are absent.
        if not query_gene:
            query_gene = query_str.split(":")[0]
        query_aa_position = self.args.position
        if query_aa_position is None:
            query_aa_position = int(query_str.split(":")[1])
        # Get ref codon
        query = {
            "size": 0,
            "aggs": {
                "by_mutations": {
                    "nested": {"path": "mutations"},
                    "aggs": {
                        "inner": {
                            "filter": {
                                "bool": {
                                    "must": [
                                        {"match": {"mutations.codon_num": query_aa_position}},
                                        {"match": {"mutations.gene": query_gene}},
                                    ]
                                }
                            },
                            "aggs": {"by_nested": {"top_hits": {"size": 1}}},
                        }
                    },
                }
            },
        }
        resp = await self.asynchronous_fetch(query)
        tmp_ref = resp["aggregations"]["by_mutations"]["inner"]["by_nested"]["hits"]["hits"]
        dict_response = []
        # Only proceed if at least one record mentions this position (gives us ref_aa).
        if len(tmp_ref) > 0:
            ref_aa = tmp_ref[0]["_source"]["ref_aa"]
            # Per-date buckets, each counting alt AAs observed at the position.
            query = {
                "aggs": {
                    "by_date": {
                        "terms": {"field": "date_collected", "size": self.size},
                        "aggs": {
                            "by_mutations": {
                                "nested": {"path": "mutations"},
                                "aggs": {
                                    "inner": {
                                        "filter": {
                                            "bool": {
                                                "must": [
                                                    {
                                                        "match": {
                                                            "mutations.codon_num": query_aa_position
                                                        }
                                                    },
                                                    {"match": {"mutations.gene": query_gene}},
                                                ]
                                            }
                                        },
                                        "aggs": {
                                            "by_name": {"terms": {"field": "mutations.alt_aa"}}
                                        },
                                    }
                                },
                            }
                        },
                    }
                }
            }
            if query_location is not None:
                # NOTE(review): query["aggs"] only contains "by_date" here — this
                # ["prevalence"]["filter"] lookup would raise KeyError whenever
                # location_id is supplied; confirm the intended aggregation key.
                query["query"] = parse_location_id_to_query(
                    query_location, query["aggs"]["prevalence"]["filter"]
                )
            if query_lineage is not None:
                if "query" in query:
                    query["query"]["bool"]["must"].append(
                        {"term": {"pangolin_lineage": query_lineage}}
                    )
                else:
                    query["query"] = {"term": {"pangolin_lineage": query_lineage}}
            resp = await self.asynchronous_fetch(query)
            buckets = resp
            path_to_results = ["aggregations", "by_date", "buckets"]
            for i in path_to_results:
                buckets = buckets[i]
            flattened_response = []
            for d in buckets:
                alt_count = 0
                for m in d["by_mutations"]["inner"]["by_name"]["buckets"]:
                    # Skip placeholder "None" alt AA buckets.
                    if m["key"] == "None":
                        continue
                    flattened_response.append(
                        {
                            "date": d["key"],
                            "total_count": d["doc_count"],
                            "aa": m["key"],
                            "aa_count": m["doc_count"],
                        }
                    )
                    alt_count += m["doc_count"]
                # Reference AA count = sequences for the day minus all alt counts.
                flattened_response.append(
                    {
                        "date": d["key"],
                        "total_count": d["doc_count"],
                        "aa": ref_aa,
                        "aa_count": d["doc_count"] - alt_count,
                    }
                )
            df_response = (
                pd.DataFrame(flattened_response)
                .assign(
                    date=lambda x: pd.to_datetime(x["date"], format="%Y-%m-%d"),
                    prevalence=lambda x: x["aa_count"] / x["total_count"],
                )
                .sort_values("date")
            )
            # Smooth prevalence per AA over time before serializing dates back to strings.
            df_response = df_response.groupby("aa").apply(
                compute_rolling_mean, "date", "prevalence", "prevalence_rolling"
            )
            df_response.loc[:, "date"] = df_response["date"].apply(lambda x: x.strftime("%Y-%m-%d"))
            dict_response = df_response.to_dict(orient="records")
        resp = {"success": True, "results": dict_response}
        return resp
<file_sep># Use this instead to test the latest fixes on 0.12.x branch before official release
# git+https://github.com/biothings/biothings.api@0.12.x#egg=biothings[web_extra]
biothings[web_extra]==0.12.2
pandas==1.4.3
pyjwt[crypto]==2.4.0
scipy==1.9.0
Jinja2==3.1.2
MarkupSafe==2.1.1
<file_sep>from . import covid19, genomics, growth_rate, resources, significance
# Optional local overrides; silently skipped when config_web_local is absent.
try:
    from config_web_local import *
except ImportError:
    pass
<file_sep>from web.handlers.genomics.base import BaseHandler
class LineageHandler(BaseHandler):
    """Look up Pangolin lineage names matching a wildcard pattern.

    GET params:
        name: wildcard pattern matched against ``pangolin_lineage``.
        size: optional cap on the number of results; when given, results are
            sorted by total_count descending before truncation.
    """

    kwargs = dict(BaseHandler.kwargs)
    kwargs["GET"] = {
        "name": {"type": str, "default": None},
        "size": {"type": int, "default": None},
    }

    async def _get(self):
        query_str = self.get_argument("name", None)
        size = self.get_argument("size", None)
        query = {
            "size": 0,
            "query": {"wildcard": {"pangolin_lineage": {"value": query_str}}},
            "aggs": {"lineage": {"terms": {"field": "pangolin_lineage", "size": 10000}}},
        }
        resp = await self.asynchronous_fetch(query)
        # Walk down to the lineage buckets in the aggregation response.
        path_to_results = ["aggregations", "lineage", "buckets"]
        buckets = resp
        for i in path_to_results:
            buckets = buckets[i]
        flattened_response = [{"name": i["key"], "total_count": i["doc_count"]} for i in buckets]
        if size:
            try:
                # get_argument() returns a string, so non-numeric input is possible.
                size = int(size)
            except (ValueError, TypeError):
                # Fixed typo in user-facing message ("Invalide" -> "Invalid") and
                # narrowed the over-broad `except Exception`.
                return {"success": False, "results": [], "errors": "Invalid size value"}
            # Keep only the `size` most frequent lineages.
            flattened_response = sorted(flattened_response, key=lambda x: -x["total_count"])
            flattened_response = flattened_response[:size]
        resp = {"success": True, "results": flattened_response}
        return resp
<file_sep>import pandas as pd
from web.handlers.genomics.base import BaseHandler
from web.handlers.genomics.util import (
calculate_proportion,
create_date_range_filter,
create_nested_mutation_query,
parse_location_id_to_query,
parse_time_window_to_query,
)
class MutationsByLineage(BaseHandler):
    """For each mutation set, count its occurrence within each Pangolin lineage.

    GET params:
        mutations: sets separated by " AND ", mutations within a set by ",".
        frequency: minimum proportion (mutation_count / lineage_count) to report.
    """

    name = "mutations-by-lineage"
    kwargs = dict(BaseHandler.kwargs)
    kwargs["GET"] = {
        "location_id": {"type": str, "default": None},
        "mutations": {"type": str, "default": None},
        "pangolin_lineage": {"type": str, "default": None},
        "frequency": {"type": float, "default": 0, "min": 0, "max": 1},
        "min_date": {"type": str, "default": None, "date_format": "%Y-%m-%d"},
        "max_date": {"type": str, "default": None, "date_format": "%Y-%m-%d"},
    }

    async def _get(self):
        query_location = self.args.location_id
        query_mutations = self.args.mutations
        query_pangolin_lineage = self.args.pangolin_lineage
        # "a,b AND c" -> [["a", "b"], ["c"]]: a list of mutation sets.
        query_mutations = (
            [muts.split(",") for muts in query_mutations.split(" AND ")]
            if query_mutations is not None
            else []
        )
        query_frequency_threshold = self.args.frequency
        results = {}
        for (
            muts
        ) in (
            query_mutations
        ):  # For multiple sets of mutations, create multiple ES queries. Since AND queries are possible doing one ES query with aggregations is cumbersome. Must look for better solution here.
            query = {
                "size": 0,
                "aggs": {
                    "lineage": {
                        "terms": {"field": "pangolin_lineage", "size": self.size},
                        "aggs": {"mutations": {"filter": {}}},
                    }
                },
            }
            if query_location is not None:
                query["query"] = parse_location_id_to_query(query_location)
            if query_pangolin_lineage is not None:
                if "query" in query:  # Only query added will be bool for location
                    query["query"]["bool"]["must"].append(
                        {"term": {"pangolin_lineage": query_pangolin_lineage}}
                    )
                else:
                    query["query"] = {"term": {"pangolin_lineage": query_pangolin_lineage}}
            # Sub-aggregation counts, within each lineage bucket, the docs
            # carrying all mutations of the current set.
            query["aggs"]["lineage"]["aggs"]["mutations"]["filter"] = create_nested_mutation_query(
                mutations=muts
            )
            date_range_filter = create_date_range_filter(
                "date_collected", self.args.min_date, self.args.max_date
            )
            query_obj = parse_time_window_to_query(date_range_filter)
            if query_obj:
                # NOTE(review): this unconditionally replaces any location/lineage
                # query built above whenever a date filter is present — other
                # handlers pass the existing query via
                # parse_time_window_to_query(..., query_obj=...); confirm intended.
                query["query"] = query_obj
            # import json
            # print(json.dumps(query))
            resp = await self.asynchronous_fetch(query)
            # print(json.dumps(resp))
            path_to_results = ["aggregations", "lineage", "buckets"]
            buckets = resp
            for i in path_to_results:
                buckets = buckets[i]
            flattened_response = []
            for i in buckets:
                # Skip lineages with no matching mutations and the "none" placeholder.
                if not i["mutations"]["doc_count"] > 0 or i["key"] == "none":
                    continue
                flattened_response.append(
                    {
                        "pangolin_lineage": i["key"],
                        "lineage_count": i["doc_count"],
                        "mutation_count": i["mutations"]["doc_count"],
                    }
                )
            if not flattened_response:
                return {"success": True, "results": {self.args.mutations: []}}
            df_response = pd.DataFrame(flattened_response)
            if df_response.shape[0] > 0:
                # Proportion of each lineage carrying the mutation set, with CI bounds.
                prop = calculate_proportion(
                    df_response["mutation_count"], df_response["lineage_count"]
                )
                df_response.loc[:, "proportion"] = prop[0]
                df_response.loc[:, "proportion_ci_lower"] = prop[1]
                df_response.loc[:, "proportion_ci_upper"] = prop[2]
            df_response = df_response[df_response["proportion"] >= query_frequency_threshold]
            results[",".join(muts)] = df_response.to_dict(orient="records")
        resp = {"success": True, "results": results}
        return resp
<file_sep>import datetime
import biothings.hub.databuild.mapper as mapper
def add_date(doc):
    """Set doc['date'] to the most recent of the document's known date fields.

    Collects the truthy values of 'date', 'dateCreated', 'dateModified' and
    'datePublished', sorts them (ISO-8601 strings sort chronologically) and
    writes the latest one back to doc['date'] as a plain ISO date string.
    On unparseable or mixed-type values doc['date'] is set to None; when no
    date field is present the doc is returned unchanged.
    """
    date_fields = ('date', 'dateCreated', 'dateModified', 'datePublished')
    dates = [doc[field] for field in date_fields if doc.get(field)]
    if dates:
        try:
            dates.sort()
            doc['date'] = datetime.datetime.fromisoformat(dates[-1]).date().isoformat()
        except (TypeError, ValueError):
            # TypeError: unsortable mixed types or non-string value;
            # ValueError: not a valid ISO-8601 string. The original bare
            # `except:` also swallowed KeyboardInterrupt/SystemExit.
            doc['date'] = None
    return doc
class DateMapper(mapper.BaseMapper):
    """Mapper that stamps each document with its most recent date (see add_date)."""

    def load(self):
        # Nothing to preload for this mapper.
        pass

    def process(self, docs):
        """Yield each document after normalizing its 'date' field."""
        for entry in docs:
            yield add_date(entry)
<file_sep>import re
import pandas as pd
from web.handlers.genomics.base import BaseHandler
from web.handlers.genomics.util import create_nested_mutation_query, get_total_hits
class LineageMutationsHandler(BaseHandler):
    """Characteristic mutations of one or more Pangolin lineages.

    Each comma-separated clause may itself be "L1 OR L2 AND mut1 AND mut2":
    lineages OR-ed together, optionally AND-ed with required mutations.
    Mutations below `frequency` prevalence within the lineage are dropped.
    """

    # Canonical capitalization for gene symbols parsed out of mutation strings.
    gene_mapping = {
        "orf1a": "ORF1a",
        "orf1b": "ORF1b",
        "s": "S",
        "orf3a": "ORF3a",
        "e": "E",
        "m": "M",
        "orf6": "ORF6",
        "orf7a": "ORF7a",
        "orf7b": "ORF7b",
        "orf8": "ORF8",
        "n": "N",
        "orf10": "ORF10",
    }
    name = "lineage-mutations"
    kwargs = dict(BaseHandler.kwargs)
    kwargs["GET"] = {
        "pangolin_lineage": {"type": str, "required": True},
        "frequency": {"type": float, "default": 0.8, "min": 0, "max": 1},
        "gene": {"type": str, "default": None},
    }

    async def _get(self):
        pangolin_lineage = self.args.pangolin_lineage
        frequency = self.args.frequency
        gene = self.args.gene
        # Optional gene filter, case-insensitive, comma-separated.
        if gene:
            genes = gene.lower().split(",")
        else:
            genes = []
        dict_response = {}
        # Query structure: Lineage 1 OR Lineage 2 OR Lineage 3 AND Mutation 1 AND Mutation 2, Lineage 4 AND Mutation 2, Lineage 5 ....
        for query_lineage in pangolin_lineage.split(","):
            query = {
                "size": 0,
                "query": {},
                "aggs": {
                    "mutations": {
                        "nested": {"path": "mutations"},
                        "aggs": {
                            "mutations": {
                                "terms": {"field": "mutations.mutation", "size": 10000},
                                "aggs": {"genomes": {"reverse_nested": {}}},
                            }
                        },
                    }
                },
            }
            query_lineage_split = query_lineage.split(" AND ")
            query_mutations = []
            query_pangolin_lineage = query_lineage_split[0].split(
                " OR "
            )  # First parameter always lineages separated by commas
            if len(query_lineage_split) > 1:
                query_mutations = query_lineage_split[1:]  # First parameter is always lineage
            query["query"] = create_nested_mutation_query(
                lineages=query_pangolin_lineage, mutations=query_mutations
            )
            # print(query)
            resp = await self.asynchronous_fetch(query)
            path_to_results = ["aggregations", "mutations", "mutations", "buckets"]
            buckets = resp
            for i in path_to_results:
                buckets = buckets[i]
            # reverse_nested "genomes" gives per-mutation genome counts; total
            # hits give the lineage denominator.
            flattened_response = [
                {
                    "mutation": i["key"],
                    "mutation_count": i["genomes"]["doc_count"],
                    "lineage_count": get_total_hits(resp),
                    "lineage": query_lineage,
                }
                for i in buckets
            ]
            if len(flattened_response) > 0:
                # Parse "gene:refPOSalt"-style mutation strings into structured
                # columns; deletions ("DEL"/"del" or "_" separators) are treated
                # specially throughout.
                df_response = pd.DataFrame(flattened_response).assign(
                    gene=lambda x: x["mutation"].apply(
                        lambda k: self.gene_mapping[k.split(":")[0]]
                        if k.split(":")[0] in self.gene_mapping
                        else k.split(":")[0]
                    ),
                    ref_aa=lambda x: x["mutation"]
                    .apply(
                        lambda k: re.findall("[A-Za-z*]+", k.split(":")[1])[0]
                        if "DEL" not in k and "del" not in k and "_" not in k
                        else k
                    )
                    .str.upper(),
                    alt_aa=lambda x: x["mutation"]
                    .apply(
                        lambda k: re.findall("[A-Za-z*]+", k.split(":")[1])[1]
                        if "DEL" not in k and "del" not in k and "_" not in k
                        else k.split(":")[1]
                    )
                    .str.upper(),
                    codon_num=lambda x: x["mutation"].apply(
                        lambda k: int(re.findall("[0-9]+", k.split(":")[1])[0])
                    ),
                    codon_end=lambda x: x["mutation"].apply(
                        lambda k: int(re.findall("[0-9]+", k.split(":")[1])[1])
                        if "/" in k and ("DEL" in k or "del" in k)
                        else None
                    ),
                    type=lambda x: x["mutation"].apply(
                        lambda k: "deletion" if "DEL" in k or "del" in k else "substitution"
                    ),
                )
                # Drop synonymous entries (ref == alt).
                df_response = df_response[df_response["ref_aa"] != df_response["alt_aa"]]
                df_response.loc[:, "prevalence"] = (
                    df_response["mutation_count"] / df_response["lineage_count"]
                )
                # Multi-codon deletions: length in nucleotides from codon span.
                df_response.loc[~df_response["codon_end"].isna(), "change_length_nt"] = (
                    (df_response["codon_end"] - df_response["codon_num"]) + 1
                ) * 3
                df_response = df_response[df_response["prevalence"] >= frequency].fillna("None")
                if genes:
                    df_response = df_response[df_response["gene"].str.lower().isin(genes)]
                dict_response[query_lineage] = df_response.to_dict(orient="records")
        resp = {"success": True, "results": dict_response}
        return resp
<file_sep># ######### #
# HUB VARS #
# ######### #
# ES s3 repository to use snapshot/restore (must be pre-configured in ES)
SNAPSHOT_REPOSITORY = "outbreak_repository"
# Pre-prod/test ES definitions
INDEX_CONFIG = {
#"build_config_key" : None, # used to select proper idxr/syncer
"indexer_select": {
# default
#None : "path.to.special.Indexer",
},
"env" : {
"test": {
"host": "localhost:9200",
"indexer": {
"args": {
"timeout": 300,
"retry_on_timeout": True,
"max_retries": 10,
},
},
"index": [],
}
},
}
# Snapshot environment configuration
SNAPSHOT_CONFIG = {}
RELEASE_CONFIG = {}
# SSH port for hub console
HUB_SSH_PORT = 19122
HUB_API_PORT = 19180
READONLY_HUB_API_PORT = 19181
# Hub name/icon url/version, for display purpose
HUB_NAME = "OutBreak API (backend)"
HUB_ICON = "https://outbreak.info/assets/icon-01-d7c2932d.svg"
HUB_VERSION = "master"
USE_RELOADER = False
STANDALONE_AWS_CREDENTIALS = {
"AWS_ACCESS_KEY_ID": "",
"AWS_SECRET_ACCESS_KEY": "",
}
STANDALONE_CONFIG = {
# default config
"_default": {
"es_host" : "localhost:9200",
"index" : "outbreak-covid19-dev",
},
"outbreak-covid19" : {
"es_host": "prodserver:9200",
"index": "outbreak-covid19",
}
}
########################################
# APP-SPECIFIC CONFIGURATION VARIABLES #
########################################
# The following variables should or must be defined in your
# own application. Create a config.py file, import that config_common
# file as:
#
# from config_hub import *
#
# then define the following variables to fit your needs. You can also override any
# any other variables in this file as required. Variables defined as ValueError() exceptions
# *must* be defined
#
# S3 bucket, root of all biothings releases information
S3_RELEASE_BUCKET = "biothings-releases"
# bucket/folder containing releases
S3_DIFF_BUCKET = "biothings-diffs"
# what sub-folder should be used within diff bucket to upload diff files
S3_APP_FOLDER = "pending" # actual pending datasource name will be appended
TORNADO_SETTINGS = {
# max 10GiB upload
"max_buffer_size" : 10*1024*1024*1024,
}
STANDALONE_VERSION = {"branch" : "standalone_v3"}
# List of versions.json URLs, Hub will handle these as sources for data releases
VERSION_URLS = []
<file_sep>from web.handlers.genomics.base import BaseHandler
from web.handlers.genomics.util import parse_location_id_to_query
class SubmissionLagHandler(BaseHandler):
    """Counts of sequences per (collection date, submission date) pair.

    Uses a composite aggregation and pages through it with `after_key`
    until all buckets have been collected.
    """

    name = "collection-submission"
    kwargs = dict(BaseHandler.kwargs)
    kwargs["GET"] = {
        "location_id": {"type": str, "default": None},
    }

    async def _get(self):
        query_location = self.args.location_id
        query = {
            "aggs": {
                "date_collected_submitted_buckets": {
                    "composite": {
                        "size": 10000,
                        "sources": [
                            {"date_collected": {"terms": {"field": "date_collected"}}},
                            {"date_submitted": {"terms": {"field": "date_submitted"}}},
                        ],
                    }
                }
            }
        }
        if query_location is not None:
            query["query"] = parse_location_id_to_query(query_location)
        resp = await self.asynchronous_fetch(query)
        path_to_results = ["aggregations", "date_collected_submitted_buckets", "buckets"]
        buckets = resp
        for i in path_to_results:
            buckets = buckets[i]
        # Composite-aggregation pagination: keep fetching while ES returns an
        # after_key, accumulating buckets into the first page's list.
        while "after_key" in resp["aggregations"]["date_collected_submitted_buckets"]:
            query["aggs"]["date_collected_submitted_buckets"]["composite"]["after"] = resp[
                "aggregations"
            ]["date_collected_submitted_buckets"]["after_key"]
            resp = await self.asynchronous_fetch(query)
            buckets.extend(resp["aggregations"]["date_collected_submitted_buckets"]["buckets"])
        flattened_response = [
            {
                "date_collected": i["key"]["date_collected"],
                "date_submitted": i["key"]["date_submitted"],
                "total_count": i["doc_count"],
            }
            for i in buckets
        ]
        resp = {"success": True, "results": flattened_response}
        return resp
<file_sep>from datetime import datetime as dt, timedelta
import pandas as pd
from web.handlers.genomics.base import BaseHandler
from web.handlers.genomics.util import (
compute_rolling_mean_all_lineages,
compute_total_count,
create_date_range_filter,
expand_dates,
get_major_lineage_prevalence,
parse_location_id_to_query,
parse_time_window_to_query,
)
class PrevalenceAllLineagesByLocationHandler(BaseHandler):
    """Daily (or cumulative) prevalence of every lineage in a location.

    Minor lineages are collapsed into "Other" via get_major_lineage_prevalence;
    non-cumulative results also get a rolling-mean smoothed prevalence.
    """

    # size = 100 # If size=1000 it will raise too_many_buckets_exception in case missing location_id in query.
    name = "prevalence-by-location-all-lineages"
    kwargs = dict(BaseHandler.kwargs)
    kwargs["GET"] = {
        "location_id": {"type": str, "default": None},
        "window": {"type": int, "default": None, "min": 1},
        "other_threshold": {"type": float, "default": 0.05, "min": 0, "max": 1},
        "nday_threshold": {"type": int, "default": 10, "min": 1},
        "ndays": {"type": int, "default": 180, "min": 1},
        "other_exclude": {"type": str, "default": None},
        "cumulative": {"type": bool, "default": False},
        "min_date": {"type": str, "default": None, "date_format": "%Y-%m-%d"},
        "max_date": {"type": str, "default": None, "date_format": "%Y-%m-%d"},
    }

    async def _get(self):
        query_location = self.args.location_id
        query_window = self.args.window
        query_other_threshold = self.args.other_threshold
        query_nday_threshold = self.args.nday_threshold
        query_ndays = self.args.ndays
        query_other_exclude = self.args.other_exclude
        # Lineages to exclude from the "Other" roll-up, comma-separated.
        query_other_exclude = (
            query_other_exclude.split(",") if query_other_exclude is not None else []
        )
        query_cumulative = self.args.cumulative
        # Per-date buckets with a nested per-lineage count.
        query = {
            "size": 0,
            "aggs": {
                "count": {
                    "terms": {"field": "date_collected", "size": self.size},
                    "aggs": {
                        "lineage_count": {"terms": {"field": "pangolin_lineage", "size": self.size}}
                    },
                }
            },
        }
        query_obj = parse_location_id_to_query(query_location)
        date_range_filter = create_date_range_filter(
            "date_collected", self.args.min_date, self.args.max_date
        )
        # Compose location filter and optional date-range filter into one query.
        query_obj = parse_time_window_to_query(date_range_filter, query_obj=query_obj)
        if query_obj:
            query["query"] = query_obj
        # import json
        # print(json.dumps(query))
        resp = await self.asynchronous_fetch(query)
        buckets = resp
        path_to_results = ["aggregations", "count", "buckets"]
        for i in path_to_results:
            buckets = buckets[i]
        flattened_response = []
        for i in buckets:
            # Skip dates lacking month/day components or containing "XX" placeholders.
            if len(i["key"].split("-")) == 1 or "XX" in i["key"]:
                continue
            for j in i["lineage_count"]["buckets"]:
                flattened_response.append(
                    {
                        "date": i["key"],
                        "total_count": i["doc_count"],
                        "lineage_count": j["doc_count"],
                        "lineage": j["key"],
                    }
                )
        if not flattened_response:
            return {"success": True, "results": []}
        df_response = (
            pd.DataFrame(flattened_response)
            .assign(
                date=lambda x: pd.to_datetime(x["date"], format="%Y-%m-%d"),
                prevalence=lambda x: x["lineage_count"] / x["total_count"],
            )
            .sort_values("date")
        )
        if (
            query_window is not None and not date_range_filter
        ):  # discard query_window if either max_date or min_date exists
            df_response = df_response[
                df_response["date"] >= (dt.now() - timedelta(days=query_window))
            ]
        # Collapse minor lineages into "Other" based on the threshold params.
        df_response = get_major_lineage_prevalence(
            df_response,
            "date",
            query_other_exclude,
            query_other_threshold,
            query_nday_threshold,
            query_ndays,
        )
        if not query_cumulative:
            # Rolling-mean smoothing per lineage, then renormalize by the
            # rolling total to get prevalence_rolling.
            df_response = (
                df_response.groupby("lineage", group_keys=True)
                .apply(
                    compute_rolling_mean_all_lineages,
                    "date",
                    "lineage_count",
                    "lineage_count_rolling",
                    "lineage",
                )
                .reset_index()
            )
            df_response = df_response.groupby("date").apply(
                compute_total_count, "lineage_count_rolling", "total_count_rolling"
            )
            df_response.loc[:, "prevalence_rolling"] = (
                df_response["lineage_count_rolling"] / df_response["total_count_rolling"]
            )
            df_response.loc[
                df_response["prevalence_rolling"].isna(), "prevalence_rolling"
            ] = 0  # Prevalence is 0 if total_count_rolling == 0.
            df_response.loc[:, "date"] = df_response["date"].apply(lambda x: x.strftime("%Y-%m-%d"))
            df_response = df_response.fillna("None")
            df_response = df_response[
                [
                    "date",
                    "total_count",
                    "lineage_count",
                    "lineage",
                    "prevalence",
                    "prevalence_rolling",
                ]
            ]
        else:
            # Cumulative mode: fill date gaps per lineage, then sum counts over
            # the whole period to yield one overall prevalence per lineage.
            df_response = (
                df_response.groupby("lineage")
                .apply(
                    expand_dates,
                    df_response["date"].min(),
                    df_response["date"].max(),
                    "date",
                    "lineage",
                )
                .reset_index()
            )
            df_response = (
                df_response.groupby("date")
                .apply(compute_total_count, "lineage_count", "total_count")
                .reset_index()
            )
            df_response = (
                df_response.groupby("lineage")
                .agg({"total_count": "sum", "lineage_count": "sum"})
                .reset_index()
            )
            df_response.loc[:, "prevalence"] = (
                df_response["lineage_count"] / df_response["total_count"]
            )
        resp = {"success": True, "results": df_response.to_dict(orient="records")}
        return resp
<file_sep>from elasticsearch_dsl import Search, Q
from biothings.web.query import ESQueryBuilder
class QueryBuilder(ESQueryBuilder):
    '''
    Assign weights to different fields.
    '''

    def default_string_query(self, q, options):
        """Build the default query_string search, boosting name fields over text."""
        query = {
            "query": {
                "query_string": {
                    "query": q,
                    "fields": [
                        "name^4",
                        "interventions.name^3",
                        "description",
                        "all"
                    ],
                    "default_operator": "AND"
                }
            }
        }
        search = Search()
        search = search.update_from_dict(query)
        return search

    def apply_extras(self, search, options):
        """Restrict results to curated COVID-19 content per source-specific rules."""
        search = super().apply_extras(search, options)
        # Source-specific inclusion rules: ImmPort needs a covid-19 health
        # condition; Dockstore/biotools need the COVID-19 topic category;
        # Zenodo needs the COVID-19 keyword; all other curators pass through.
        immport = Q('term', **{ "curatedBy.name": "ImmPort" })
        health_condition = Q('term', **{ "healthCondition.name": "covid-19" })
        dockstore = Q('terms', **{ 'curatedBy.name': [ 'Dockstore', 'biotools' ]})
        topic_cat = Q('term', **{ 'topicCategory.name': 'COVID-19' })
        other = ~Q('terms', **{ 'curatedBy.name': ['ImmPort', 'Dockstore', 'biotools', 'Zenodo'] })
        zenodo = Q('term', **{ 'curatedBy.name': 'Zenodo'})
        keyword = Q('term', **{ 'keywords': 'COVID-19'})
        search = search.query('bool', filter=[(immport & health_condition) | (dockstore & topic_cat) | (zenodo & keyword) | other])
        # Optional @type filter from the request.
        if options._type:
            search = search.filter('term', **{'@type': options._type})
        return search
<file_sep>from web.handlers.genomics.base import BaseHandler
class MutationHandler(BaseHandler):
    """Look up mutation names matching a wildcard pattern with their counts."""

    name = "mutations"
    kwargs = dict(BaseHandler.kwargs)
    kwargs["GET"] = {
        "name": {"type": str, "required": True},
    }

    async def _get(self):
        query_str = self.args.name
        # Nested agg: wildcard-match mutation names, then count docs per name.
        query = {
            "size": 0,
            "aggs": {
                "mutations": {
                    "nested": {"path": "mutations"},
                    "aggs": {
                        "mutation_filter": {
                            "filter": {"wildcard": {"mutations.mutation": {"value": query_str}}},
                            "aggs": {
                                "count_filter": {
                                    "terms": {"field": "mutations.mutation", "size": 10000}
                                }
                            },
                        }
                    },
                }
            },
        }
        resp = await self.asynchronous_fetch(query)
        # Walk down to the per-mutation buckets in the aggregation response.
        path_to_results = [
            "aggregations",
            "mutations",
            "mutation_filter",
            "count_filter",
            "buckets",
        ]
        buckets = resp
        for i in path_to_results:
            buckets = buckets[i]
        flattened_response = [{"name": i["key"], "total_count": i["doc_count"]} for i in buckets]
        resp = {"success": True, "results": flattened_response}
        return resp
<file_sep>import json
import logging
from pprint import pprint
import requests
from elasticsearch import Elasticsearch
from elasticsearch.exceptions import TransportError
from elasticsearch_dsl import Search
from tornado.options import options, parse_command_line
options.define('host', default="api.outbreak.info:9200")
options.define('pattern', default="outbreak-resources-*")
options.define('url', default="https://raw.githubusercontent.com/outbreak-info/outbreak_preprint_matcher/main/results/update%20dumps/update_file.json")
def main():
    """Fetch preprint-correction updates and merge them into matching ES documents."""
    parse_command_line()
    try:
        client = Elasticsearch(options.host)
        updates = requests.get(options.url).json()
        for update in updates:
            search = Search().using(client).query("match", _id=update['_id'])
            response = search.execute()
            # NOTE(review): assumes hits.total is a plain int (ES6-style
            # response) — confirm against the cluster version in use.
            if response.hits.total == 1:
                _index = response.hits.hits[0]['_index']
                _type = response.hits.hits[0]['_type']
                _correction = update['correction']
                assert isinstance(_correction, list)
                # Merge with any corrections already stored on the document,
                # normalizing a scalar existing value into the list.
                if 'correction' in response.hits.hits[0]:
                    existing = response.hits.hits[0]['correction']
                    if isinstance(existing, list):
                        _correction += existing
                    else:
                        _correction.append(existing)
                client.update(_index, doc_type=_type, id=update['_id'], body={'doc': {
                    'correction': _correction
                }})
            else:
                logging.error("id not unique.")
    except (TransportError, requests.exceptions.RequestException) as exc:
        # Bug fix: `.info` only exists on TransportError; accessing it on a
        # RequestException raised AttributeError inside the error handler.
        logging.error(getattr(exc, "info", exc))
# Script entry point.
if __name__ == "__main__":
    main()
<file_sep>import requests
from datetime import datetime
from tests.secret import auth_token
def _get_endpoint(endpoint, prefix='genomics/', host="https://test.outbreak.info/"):
    """GET ``host + prefix + endpoint`` and return the `requests` response.

    Genomics routes are auth-gated, so a bearer token header is attached
    whenever 'genomics' appears in the prefix; other routes are called
    without headers. (Removed old commented-out debug code.)
    """
    url = f'{host}{prefix}{endpoint}'
    if 'genomics' in prefix:
        headers = {"Authorization": f"Bearer {auth_token}"}
    else:
        headers = None
    return requests.get(url, headers=headers)
def _test_success(res_json, url):
assert res_json is not None, f"{url} did not return response"
assert res_json.get('success'), f"{url} unsuccessful"
assert res_json['success'] == True, f"{url} unsuccessful"
def _test_results(res_json, url):
"""
tests that the api returns successfully with one or more results,
returns the first result for convenience
"""
assert res_json.get('results'), f"{url} no results"
assert len(res_json['results']), f"{url} no results"
return res_json['results'][0]
def _test_date(result, url):
assert datetime.strptime(result['date'], '%Y-%m-%d'), f"{url} date not modified YYYY-MM-DD"
def _test_total_count(result, url):
assert type(result['total_count']) is int, f"{url} total_count not an int"
def _generic_api_test(url):
    """Fetch *url*, verify a successful response with results, return the first result."""
    res_json = _get_endpoint(url).json()
    _test_success(res_json, url)
    return _test_results(res_json, url)
<file_sep>#!/usr/bin/env python
import logging
import os
from functools import partial
import biothings
from biothings.utils.version import set_versions
import config
logging.getLogger("requests").setLevel(logging.ERROR)
app_folder, _src = os.path.split(os.path.split(os.path.abspath(__file__))[0])
set_versions(config, app_folder)
from biothings.hub import HubServer
import hub.dataload.sources
#outbreak_features = getattr(config, "HUB_FEATURES", None)
class OutBreakHubServer(HubServer):
    """HubServer variant that wires in the project's ResourcesBuilder."""
    #DEFAULT_FEATURES = config.HUB_FEATURES

    def configure_build_manager(self):
        # set specific managers
        import biothings.hub.databuild.builder as builder
        from hub.databuild.builder import ResourcesBuilder
        # Override the default builder class so all builds use ResourcesBuilder.
        build_manager = builder.BuilderManager(builder_class=ResourcesBuilder,job_manager=self.managers["job_manager"])
        build_manager.configure()
        build_manager.poll()
        self.managers["build_manager"] = build_manager
# Server instance is created at import time so it is reachable by importers
# as well as when run as a script.
server = OutBreakHubServer(hub.dataload.sources, name=config.HUB_NAME)
if __name__ == "__main__":
    config.logger.info("Hub DB backend: %s", biothings.config.HUB_DB_BACKEND)
    config.logger.info("Hub database: %s", biothings.config.DATA_HUB_DB_DATABASE)
    server.start()
<file_sep>import biothings.hub.databuild.builder as builder
from hub.databuild.mapper import DateMapper
class ResourcesBuilder(builder.DataBuilder):
    """DataBuilder that runs every source through the DateMapper."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # The None key registers DateMapper as the default mapper for all sources.
        self.mappers = {None: DateMapper(name="resources")}
<file_sep>API_PREFIX = "genomics"
ES_INDEX = "outbreak-genomics"
API_VERSION = "v2"
APP_LIST_V2 = [
(
r"/{pre}/{ver}/lineage-mutations",
"web.handlers.v2.genomics.lineage_mutations.LineageMutationsHandler",
),
(r"/{pre}/{ver}/lineage", "web.handlers.v2.genomics.LineageHandler"),
(r"/{pre}/{ver}/location", "web.handlers.v2.genomics.LocationHandler"),
(
r"/{pre}/{ver}/prevalence-by-location",
"web.handlers.v2.genomics.PrevalenceByLocationAndTimeHandler",
),
(
r"/{pre}/{ver}/prevalence-by-location-all-lineages",
"web.handlers.v2.genomics.PrevalenceAllLineagesByLocationHandler",
),
(
r"/{pre}/{ver}/prevalence-by-position",
"web.handlers.v2.genomics.PrevalenceByAAPositionHandler",
),
(
r"/{pre}/{ver}/mutation-details",
"web.handlers.v2.genomics.MutationDetailsHandler",
),
(
r"/{pre}/{ver}/collection-submission",
"web.handlers.v2.genomics.SubmissionLagHandler",
),
(
r"/{pre}/{ver}/mutations",
"web.handlers.v2.genomics.MutationHandler",
),
(
r"/{pre}/{ver}/mutations-by-lineage",
"web.handlers.v2.genomics.MutationsByLineage",
),
(
r"/{pre}/{ver}/location-lookup",
"web.handlers.v2.genomics.LocationDetailsHandler",
),
(
r"/{pre}/{ver}/most-recent-collection-date-by-location",
"web.handlers.v2.genomics.MostRecentCollectionDateHandler",
),
(
r"/{pre}/{ver}/most-recent-submission-date-by-location",
"web.handlers.v2.genomics.MostRecentSubmissionDateHandler",
),
(
r"/{pre}/{ver}/sequence-count",
"web.handlers.v2.genomics.SequenceCountHandler",
),
(
r"/{pre}/{ver}/lineage-by-sub-admin-most-recent",
"web.handlers.v2.genomics.CumulativePrevalenceByLocationHandler",
),
]
APP_LIST_SWITCHED_TO_V2 = [
(
r"/{pre}/lineage-mutations",
"web.handlers.v2.genomics.lineage_mutations.LineageMutationsHandler",
),
(r"/{pre}/lineage", "web.handlers.v2.genomics.LineageHandler"),
(r"/{pre}/location", "web.handlers.v2.genomics.LocationHandler"),
(
r"/{pre}/prevalence-by-location",
"web.handlers.v2.genomics.PrevalenceByLocationAndTimeHandler",
),
(
r"/{pre}/prevalence-by-location-all-lineages",
"web.handlers.v2.genomics.PrevalenceAllLineagesByLocationHandler",
),
]
APP_LIST_v1 = [
(r"/{pre}/v1/lineage-mutations", "web.handlers.genomics.LineageMutationsHandler"),
(r"/{pre}/v1/lineage", "web.handlers.genomics.LineageHandler"),
(r"/{pre}/v1/location", "web.handlers.genomics.LocationHandler"),
(
r"/{pre}/v1/prevalence-by-location",
"web.handlers.genomics.PrevalenceByLocationAndTimeHandler",
),
(
r"/{pre}/v1/prevalence-by-location-all-lineages",
"web.handlers.genomics.PrevalenceAllLineagesByLocationHandler",
),
]
APP_LIST_ORIGIN = [
(r"/{pre}/lineage-by-country", "web.handlers.genomics.lineage.LineageByCountryHandler"),
(r"/{pre}/lineage-by-division", "web.handlers.genomics.LineageByDivisionHandler"),
(r"/{pre}/lineage-and-country", "web.handlers.genomics.LineageAndCountryHandler"),
(r"/{pre}/lineage-and-division", "web.handlers.genomics.LineageAndDivisionHandler"),
(r"/{pre}/sequence-count", "web.handlers.genomics.SequenceCountHandler"),
(r"/{pre}/global-prevalence", "web.handlers.genomics.GlobalPrevalenceByTimeHandler"),
(r"/{pre}/prevalence-by-position", "web.handlers.genomics.PrevalenceByAAPositionHandler"),
(
r"/{pre}/lineage-by-sub-admin-most-recent",
"web.handlers.genomics.CumulativePrevalenceByLocationHandler",
),
(
r"/{pre}/most-recent-collection-date-by-location",
"web.handlers.genomics.MostRecentCollectionDateHandler",
),
(
r"/{pre}/most-recent-submission-date-by-location",
"web.handlers.genomics.MostRecentSubmissionDateHandler",
),
(r"/{pre}/mutation-details", "web.handlers.genomics.MutationDetailsHandler"),
(r"/{pre}/mutations-by-lineage", "web.handlers.genomics.MutationsByLineage"),
(r"/{pre}/collection-submission", "web.handlers.genomics.SubmissionLagHandler"),
(r"/{pre}/location-lookup", "web.handlers.genomics.LocationDetailsHandler"),
(r"/{pre}/mutations", "web.handlers.genomics.MutationHandler"),
(r"/{pre}/metadata", "web.handlers.genomics.MetadataHandler"),
(r"/{pre}/gisaid-id-lookup", "web.handlers.genomics.GisaidIDHandler"),
(r"/{pre}/get-auth-token", "web.handlers.genomics.GISAIDTokenHandler"),
]
APP_LIST = [
*APP_LIST_SWITCHED_TO_V2,
*APP_LIST_V2,
*APP_LIST_v1,
*APP_LIST_ORIGIN,
]
<file_sep># flake8: noqa
from .cumulative_prevalence_by_location import CumulativePrevalenceByLocationHandler
from .lineage import LineageHandler
from .lineage_mutations import LineageMutationsHandler
from .location import LocationHandler
from .location_details import LocationDetailsHandler
from .most_recent_date import MostRecentCollectionDateHandler, MostRecentSubmissionDateHandler
from .mutation_details import MutationDetailsHandler
from .mutations import MutationHandler
from .mutations_by_lineage import MutationsByLineage
from .prevalence_all_lineages_by_location import PrevalenceAllLineagesByLocationHandler
from .prevalence_by_aa_position import PrevalenceByAAPositionHandler
from .prevalence_by_location_and_time import PrevalenceByLocationAndTimeHandler
from .sequence_count import SequenceCountHandler
from .submission_lag import SubmissionLagHandler
<file_sep>import pytest
import json
from tests.util import endpoints
def test_mini_api():
    """
    Replay saved API responses: for each URL recorded in responses.json,
    hit the live test server and assert the payload has not changed.
    """
    with open('tests/mini_api/responses.json', 'r') as responses_json:
        responses = json.load(responses_json)
    for url, saved_response in responses.items():
        # Live call against the staging server; must match the saved snapshot exactly.
        live_response = endpoints._get_endpoint(url, server='test.outbreak.info').json()
        assert saved_response == live_response
<file_sep># *****************************************************************************
# Elasticsearch variables
# *****************************************************************************
ES_INDEX = "outbreak-growth_rate"
API_PREFIX = "growth_rate"
ES_DOC_TYPE = "growth_rate"
API_VERSION = ""
APP_LIST = [
(r"/{pre}/{ver}/metadata/?", "web.handlers.GRMetadataSourceHandler"),
(r"/{pre}/{ver}/query/?", "web.handlers.GRQueryHandler"),
]
<file_sep>from datetime import timedelta, datetime as dt
from scipy.stats import beta
import pandas as pd
def calculate_proportion(_x, _n):
    """
    Return (point estimate, ci_lower, ci_upper) for _x successes out of _n
    trials, using the 95% Jeffreys interval: Beta(x + 0.5, n - x + 0.5).
    The point estimate uses the raw (un-rounded) inputs.
    """
    successes = _x.round()
    trials = _n.round()
    ci_low, ci_upp = beta.interval(1 - 0.05, successes + 0.5, trials - successes + 0.5)  # Jeffreys Interval
    estimate = _x/_n
    return estimate, ci_low, ci_upp
def compute_total_count(df, col, new_col):
    """Attach the column-wide sum of *col* to every row of *df* as *new_col* (in place)."""
    total = df[col].sum()
    df.loc[:, new_col] = total
    return df
def expand_dates(df, date_min, date_max, index_col, grp_col):
    """
    Reindex *df* over every calendar day in [date_min, date_max], filling
    missing days with 0, dropping the grouping column, and exposing the
    dates in a "date" column.
    """
    full_range = pd.date_range(date_min, date_max)
    expanded = df.set_index(index_col)
    expanded = expanded.reindex(full_range, fill_value=0)
    expanded = expanded.drop(grp_col, axis=1)
    expanded = expanded.reset_index()
    return expanded.rename(columns={"index": "date"})
def compute_rolling_mean_all_lineages(df, index_col, col, new_col, grp_col):
    """
    Fill every missing calendar day between the min and max of *index_col*
    with 0, then compute a 7-day rolling mean of *col* into *new_col*.
    The grouping column is dropped and dates end up in a "date" column.
    """
    full_range = pd.date_range(df[index_col].min(), df[index_col].max())
    daily = df.set_index(index_col).reindex(full_range, fill_value=0)
    daily[new_col] = daily[col].rolling("7d").mean()
    daily = daily.drop(grp_col, axis=1)
    daily = daily.reset_index()
    return daily.rename(columns={"index": "date"})
def compute_rolling_mean(df, index_col, col, new_col):
    """Store a 7-day rolling mean of *col* in *new_col*, indexed by *index_col*."""
    indexed = df.set_index(index_col)
    indexed[new_col] = indexed[col].rolling("7d").mean()
    return indexed.reset_index()
def transform_prevalence(resp, path_to_results = [], cumulative = False):
    """
    Convert an ES date-histogram aggregation into lineage-prevalence records.

    resp: raw ES aggregation response; path_to_results: sequence of keys used
    to drill down to the date buckets. When cumulative is False, returns a
    list of per-day records with 7-day rolling counts and Jeffreys-interval
    proportions; when True, returns a single summary dict with cumulative
    counts and first/last detection dates.

    NOTE(review): path_to_results is a mutable default argument; it is only
    iterated, never mutated, so this is currently harmless.
    """
    buckets = resp
    # Drill down through the nested aggregation to reach the date buckets.
    for i in path_to_results:
        buckets = buckets[i]
    if len(buckets) == 0:
        return {"success": True, "results": {}}
    # Keep only keys that contain at least one "-" (i.e. more than a bare year)
    # and drop keys with "XX" placeholders (incompletely specified dates).
    flattened_response = [{
        "date": i["key"],
        "total_count": i["doc_count"],
        "lineage_count": i["lineage_count"]["doc_count"]
    } for i in buckets if len(i["key"].split("-")) > 1 and "XX" not in i["key"]]
    df_response = (
        pd.DataFrame(flattened_response)
        .assign(date = lambda x: pd.to_datetime(x["date"], format="%Y-%m-%d"))
        .sort_values("date")
    )
    # Earliest date on which the lineage was actually observed.
    first_date = df_response[df_response["lineage_count"] > 0]["date"].min()
    dict_response = {}
    if not cumulative:
        df_response = df_response[df_response["date"] >= first_date - pd.to_timedelta(6, unit='d')] # Go back 6 days for total_rolling
        df_response = compute_rolling_mean(df_response, "date", "total_count", "total_count_rolling")
        df_response = compute_rolling_mean(df_response, "date", "lineage_count", "lineage_count_rolling")
        df_response = df_response[df_response["date"] >= first_date] # Revert back to first date after total_rolling calculations are complete
        # Jeffreys-interval proportion of lineage over total (rolling counts).
        d = calculate_proportion(df_response["lineage_count_rolling"], df_response["total_count_rolling"])
        df_response.loc[:, "proportion"] = d[0]
        df_response.loc[:, "proportion_ci_lower"] = d[1]
        df_response.loc[:, "proportion_ci_upper"] = d[2]
        df_response.loc[:,"date"] = df_response["date"].apply(lambda x: x.strftime("%Y-%m-%d"))
        dict_response = df_response.to_dict(orient="records")
    else: # For cumulative only calculate cumsum prevalence
        df_response = df_response[df_response["date"] >= first_date]
        if df_response.shape[0] == 0:
            # Lineage never detected in the window: return zeroed summary.
            dict_response = {
                "global_prevalence": 0,
                "total_count": 0,
                "lineage_count": 0,
                "first_detected": None,
                "last_detected": None
            }
        else:
            lineage_cumsum = int(df_response["lineage_count"].sum())
            total_cumsum = int(df_response["total_count"].sum())
            df_date_sorted = df_response[df_response["lineage_count"] > 0].sort_values("date")
            dict_response = {
                "global_prevalence": lineage_cumsum/total_cumsum,
                "total_count": total_cumsum,
                "lineage_count": lineage_cumsum,
                "first_detected": df_date_sorted["date"].iloc[0].strftime("%Y-%m-%d"),
                "last_detected": df_date_sorted["date"].iloc[-1].strftime("%Y-%m-%d")
            }
    return dict_response
def compute_cumulative(grp, cols):
grp = grp.sort_values("date")
if grp.shape[0] != 0:
for i in cols:
grp.loc[:, "cum_{}".format(i)] = grp[i].cumsum()
grp.loc[:, "cum_{}".format(i)] = grp[i].cumsum()
return grp.tail(1)
else:
for i in cols:
if i == "total_count":
grp.loc[:, "cum_total_count"] = grp["total_count"].cumsum()
else:
grp.loc[:, "cum_{}".format(i)] = 0
return grp.tail(1)
def transform_prevalence_by_location_and_tiime(flattened_response, ndays = None, query_detected = False):
    """
    Summarize per-location lineage prevalence.

    NOTE(review): "tiime" is a typo in the public name; callers import it
    as-is, so renaming would be a breaking interface change.

    When query_detected is False, returns per-location records with
    cumulative counts (optionally restricted to the last *ndays* days) and
    Jeffreys-interval proportions; when True, returns only the names of
    locations where the lineage was detected at least once.
    """
    df_response = (
        pd.DataFrame(flattened_response)
        .assign(date = lambda x: pd.to_datetime(x["date"], format="%Y-%m-%d"))
        .sort_values("date")
    )
    grps = []
    dict_response = {}
    if not query_detected:
        if ndays is not None:
            # Restrict to the trailing ndays window relative to "today".
            date_limit = dt.today() - timedelta(days = ndays)
            df_response = df_response[df_response["date"] >= date_limit]
        if df_response.shape[0] == 0:
            return []
        # One cumulative summary row per location name.
        df_response = df_response.groupby("name").apply(compute_cumulative, ["total_count", "lineage_count"])
        df_response.loc[:,"date"] = df_response["date"].apply(lambda x: x.strftime("%Y-%m-%d"))
        d = calculate_proportion(df_response["cum_lineage_count"], df_response["cum_total_count"])
        df_response.loc[:, "proportion"] = d[0]
        df_response.loc[:, "proportion_ci_lower"] = d[1]
        df_response.loc[:, "proportion_ci_upper"] = d[2]
        dict_response = df_response.to_dict(orient="records")
    else:
        # Only report where the lineage has ever been seen.
        dict_response = {
            "names": df_response[df_response["lineage_count"] > 0]["name"].unique().tolist()
        }
    return dict_response
def create_nested_mutation_query(location_id=None, lineages=None, mutations=None):
    """
    Build an ES bool query:
    (lineage 1 AND mutation 1 AND mutation 2 ...) OR (lineage 2 AND mutation 1 ...) ...
    When only mutations are given, all mutations must match; when only
    lineages are given, any lineage may match. The optional location_id is
    folded into the query in place via parse_location_id_to_query.

    Fix: the list parameters previously used mutable default arguments
    (``lineages=[]``); they now default to None and are normalized here,
    which is call-compatible with the old signature.
    """
    lineages = [] if lineages is None else lineages
    mutations = [] if mutations is None else mutations
    query_obj = {
        "bool": {
            "should": []
        }
    }
    # One (lineage AND mutations...) clause per requested lineage.
    bool_should = []
    for lineage in lineages:
        bool_should.append({
            "bool": {
                "must": [{
                    "term": {
                        "pangolin_lineage": lineage
                    }
                }]
            }
        })
    # Nested term filters, one per mutation.
    bool_mutations = []
    for mutation in mutations:
        bool_mutations.append({
            "nested": {
                "path": "mutations",
                "query": {
                    "term" : { "mutations.mutation" : mutation }
                }
            }
        })
    if len(bool_mutations) > 0:  # If mutations specified
        if len(bool_should) > 0:  # If lineage and mutations specified
            for clause in bool_should:
                clause["bool"]["must"].extend(bool_mutations)
            query_obj["bool"]["should"] = bool_should
        else:  # If only mutations are specified
            query_obj = {
                "bool": {
                    "must": bool_mutations
                }
            }
    else:  # If only lineage specified
        query_obj["bool"]["should"] = bool_should
    # Mutates query_obj in place when location_id is not None.
    parse_location_id_to_query(location_id, query_obj)
    return query_obj
def classify_other_category(grp, keep_lineages):
    """
    Relabel every lineage not in *keep_lineages* (and the literal "none")
    as "other", then aggregate: total_count takes the group's first value,
    lineage_count is summed. Mutates *grp* in place before aggregating.
    """
    minor = ~grp["lineage"].isin(keep_lineages)
    grp.loc[minor | (grp["lineage"] == "none"), "lineage"] = "other"  # Temporarily remove none. TODO: Proper fix
    aggregated = grp.groupby("lineage").agg({
        "total_count": lambda s: s.iloc[0],
        "lineage_count": "sum"
    })
    return aggregated
def get_major_lineage_prevalence(df, index_col, keep_lineages = [], prevalence_threshold = 0.05, nday_threshold = 10, ndays = 180):
    """
    Collapse low-prevalence lineages into "other".

    A lineage is retained when its prevalence reached *prevalence_threshold*
    on at least *nday_threshold* rows within the last *ndays* days (the day
    threshold is scaled down proportionally when fewer distinct dates exist);
    lineages in *keep_lineages* are always retained.

    NOTE(review): keep_lineages is a mutable default argument; it is never
    mutated here (extend() is called on a freshly built list), so currently safe.
    """
    date_limit = dt.today() - timedelta(days = ndays)
    # Per-lineage count of rows above the prevalence threshold in the window;
    # presumably one row per (date, lineage) — confirm against callers.
    lineages_to_retain = df[(df["prevalence"] >= prevalence_threshold) & (df["date"] >= date_limit)]["lineage"].value_counts()
    num_unique_dates = df[df["date"] >= date_limit]["date"].unique().shape[0]
    if num_unique_dates < nday_threshold:
        # Fewer observed dates than the threshold: scale it down proportionally.
        nday_threshold = round((nday_threshold/ndays) * num_unique_dates)
    lineages_to_retain = lineages_to_retain[lineages_to_retain >= nday_threshold].index.tolist()
    lineages_to_retain.extend(keep_lineages)
    # Relabel all non-retained lineages as "other" within each index group.
    df = df.groupby(index_col).apply(classify_other_category, lineages_to_retain)
    df = df.reset_index()
    df.loc[:,"prevalence"] = df["lineage_count"]/df["total_count"]
    return df
def parse_location_id_to_query(query_id, query_obj = None):
    """
    Translate an underscore-delimited location id
    ("country[_division[_location]]") into ES term filters appended to
    *query_obj* (a fresh bool/must query is created when None).

    If *query_obj* already has a non-empty bool/should (the
    create_nested_mutation_query case), the location terms are appended to
    every should-clause instead. Returns None when *query_id* is None,
    otherwise the query object.

    Fix: None comparisons now use identity (``is None``) instead of ``==``.
    """
    if query_id is None:
        return None
    location_codes = query_id.split("_")
    if query_obj is None:
        query_obj = {
            "bool": {
                "must": []
            }
        }
    location_types = ["country_id", "division_id", "location_id"]
    for i in range(min(3, len(location_codes))):
        if i == 1 and len(location_codes[i].split("-")) > 1:  # For division remove iso2 code if present
            location_codes[i] = location_codes[i].split("-")[1]
        if "must" in query_obj["bool"]:
            query_obj["bool"]["must"].append({
                "term": {
                    location_types[i]: location_codes[i]
                }
            })
        elif "should" in query_obj["bool"] and len(query_obj["bool"]["should"]) > 0:
            # create_nested_mutation_query path: apply the location term to
            # every (lineage AND mutations) clause.
            for bool_must in query_obj["bool"]["should"]:
                bool_must["bool"]["must"].append({
                    "term": {
                        location_types[i]: location_codes[i]
                    }
                })
    return query_obj
def parse_time_window_to_query(date_range_filter=None, query_obj=None):
    """
    Append *date_range_filter* to *query_obj* (a fresh bool/must query is
    created when None). If the query only has a non-empty bool/should, the
    filter is appended to every should-clause instead. A None filter returns
    *query_obj* unchanged.
    """
    if date_range_filter is None:
        return query_obj
    if query_obj is None:
        query_obj = {"bool": {"must": []}}
    bool_clause = query_obj["bool"]
    if "must" in bool_clause:
        bool_clause["must"].append(date_range_filter)
    elif bool_clause.get("should"):
        for sub_clause in bool_clause["should"]:
            sub_clause["bool"]["must"].append(date_range_filter)
    return query_obj
def create_lineage_concat_query(queries, query_tmpl):
    """
    Set query_tmpl["query"] in place from a comma-separated lineage string:
    a single name becomes a bool/filter term, multiple names a bool/should
    of term queries.
    """
    names = queries.split(",")
    if len(names) == 1:
        query_tmpl["query"] = {
            "bool": {
                "filter": [{"term": {"pangolin_lineage": names[0]}}]
            }
        }
        return
    query_tmpl["query"] = {
        "bool": {
            "should": [{"term": {"pangolin_lineage": name}} for name in names]
        }
    }
def create_iterator(lineages, mutations):
    """
    Pair each lineage with the full mutation list. With no lineages but some
    mutations, yield a single (None, mutations) pair; with neither, yield
    nothing.
    """
    if lineages:
        return zip(lineages, [mutations] * len(lineages))
    if mutations:
        return zip([None], [mutations])
    return zip([], [])
def get_total_hits(d):
    """Return the total hit count, handling both ES 7.x (dict) and 6.x (int) shapes."""
    total = d["hits"]["total"]
    if isinstance(total, dict):
        return total["value"]
    return total
def create_date_range_filter(field_name, min_date=None, max_date=None):
    """
    Build an ES range filter on *field_name* with optional gte/lte bounds.
    Returns None when neither bound is given (falsy bounds count as absent).
    """
    if not max_date and not min_date:
        return None
    bounds = {}
    if max_date:
        bounds["lte"] = max_date
    if min_date:
        bounds["gte"] = min_date
    return {"range": {field_name: bounds}}
<file_sep>import functools
import inspect
import urllib.parse
from typing import Callable, Optional, Awaitable
from config_web import GPS_CLIENT_ID, GPS_API_ENDPOINT, GPS_AUTHN_URL, SECRET_KEY, CACHE_TIME, WHITELIST_KEYS
import jwt
from datetime import datetime as dt, timedelta, timezone
import aiohttp
from tornado.web import RequestHandler, HTTPError
# 15 seconds may or may not be a reasonable default
# NOTE(review): creating an aiohttp.ClientSession at import time, outside a
# running event loop, is deprecated in newer aiohttp versions — confirm the
# pinned aiohttp version tolerates this.
_gisaid_gps_api_http_client = aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(15.0))
def gisaid_authorized(method: Callable[..., Optional[Awaitable[None]]]) ->\
        Callable[..., Optional[Awaitable[None]]]:
    """
    Decorator for tornado handler methods requiring a GISAID bearer token.

    Flow: a whitelisted raw key passes immediately; otherwise the token is a
    JWT (HS256, SECRET_KEY) carrying authn_token / is_authenticated /
    last_checked. Within CACHE_TIME the cached is_authenticated flag is
    trusted; between CACHE_TIME and 2*CACHE_TIME an authenticated token is
    re-checked against the GISAID GPS API and, if still valid, a refreshed
    JWT is returned in the X-Auth-Token header. Everything else is 401/403.
    """
    @functools.wraps(method)
    async def wrapper(self: RequestHandler, *args, **kwargs) -> Optional[Awaitable[None]]:
        try:
            authz_header = self.request.headers['Authorization']
        except KeyError:
            # No Authorization header at all.
            self.set_status(401)
            self.add_header('WWW-Authenticate',
                            'Bearer realm="GISAID Authentication-Token"')
            return self.finish()
        # now do authn with that token we got
        parts = authz_header.split()
        # check for malformed authz header
        if len(parts) != 2 or parts[0] != "Bearer":
            raise HTTPError(400)
        # we are assuming that the token is of type str
        if parts[1] in WHITELIST_KEYS:
            # Whitelisted keys bypass JWT validation entirely.
            result = method(self, *args, **kwargs)
            if inspect.isawaitable(result):
                return await result
            else:
                return result
        decoded_token = None
        try:
            decoded_token = jwt.decode(parts[1], SECRET_KEY, algorithms=["HS256"])
        except jwt.ExpiredSignatureError:
            self.set_status(403)
            self.add_header('WWW-Authenticate',
                            'Bearer realm="GISAID Authentication-Token"')
            self.write({"message": "Token expired. Please re-authenticate!"})
            return self.finish()
        except jwt.DecodeError:
            self.set_status(403)
            self.add_header('WWW-Authenticate',
                            'Bearer realm="GISAID Authentication-Token"')
            self.write({"message": "Invalid token. Please authenticate!"})
            return self.finish()
        # Age of the token's last GISAID check.
        # NOTE(review): `.seconds` discards whole days from the timedelta
        # (unlike .total_seconds()), and dt.utcfromtimestamp is deprecated in
        # Python 3.12 — confirm intent before touching.
        token_diff_time = (dt.now(timezone.utc) - dt.utcfromtimestamp(decoded_token["last_checked"]).replace(tzinfo=timezone.utc)).seconds
        #print(token_diff_time)
        reset_last_checked = False  # False only if cache expired and token is unauthenticated
        if token_diff_time <= CACHE_TIME:  # Cached token
            if decoded_token["is_authenticated"]:  # Authenticated
                result = method(self, *args, **kwargs)
                if inspect.isawaitable(result):
                    return await result
                else:
                    return result
            else:  # Unauthenticated
                reset_last_checked = True
        elif token_diff_time <= CACHE_TIME * 2:  # Cache expired. Extend cache time to allow user to get new token
            if decoded_token["is_authenticated"]:  # Authenticated
                reset_last_checked = True
        if reset_last_checked:
            # Re-validate the underlying GISAID token against the GPS API.
            request_params = {
                "api": {"version": 1},
                "ctx": "cov",
                "client_id": GPS_CLIENT_ID,
                "auth_token": decoded_token["authn_token"],
                "cmd": "state/auth/check"
            }
            resp = await _gisaid_gps_api_http_client.post(
                GPS_API_ENDPOINT, json=request_params
            )
            resp.raise_for_status()  # raise on non 200 resp.
            resp_json = await resp.json()
            if resp_json['rc'] == 'ok':
                # Still valid: hand back a refreshed JWT via X-Auth-Token.
                encoded_api_token = jwt.encode({
                    "authn_token": decoded_token["authn_token"],
                    "last_checked": dt.now(timezone.utc).timestamp(),
                    "is_authenticated": True
                }, SECRET_KEY, algorithm="HS256")
                self.add_header('X-Auth-Token', encoded_api_token)
                result = method(self, *args, **kwargs)
                if inspect.isawaitable(result):
                    return await result
                else:
                    return result
            else:
                self.set_status(403)
                self.write({'gisaid_response': resp_json['rc']})
                return self.finish()
        else:
            # Cache expired past the grace window and token never authenticated.
            # NOTE(review): unlike the branches above, this path does not call
            # self.finish() — presumably tornado finishes the request when the
            # coroutine returns; confirm.
            self.set_status(403)
            self.add_header('WWW-Authenticate',
                            'Bearer realm="GISAID Authentication-Token"')
            self.write({"message": "Invalid token. Please authenticate!"})
    return wrapper
class GISAIDTokenHandler(RequestHandler):
    """Issue (POST) and validate (GET) GISAID authentication tokens."""
    @gisaid_authorized
    def get(self):
        """
        Check validity of a token against GISAID API,
        Respond with code 200 if valid, or else respond with 403.
        """
        return
    async def post(self):
        """
        Request a fresh auth token from the GISAID GPS API and return it
        JWT-wrapped, together with the URL the user must visit to complete
        authentication.
        """
        request_params = {
            "api": {"version": 1}, "ctx": "cov",
            "client_id": GPS_CLIENT_ID,
            "cmd": "state/auth/make_token"
        }
        resp = await _gisaid_gps_api_http_client.post(
            GPS_API_ENDPOINT,
            json=request_params
        )
        resp.raise_for_status()
        resp_body = await resp.json()
        if resp_body['rc'] == 'ok':
            token = resp_body['auth_token']
            # Create JWT token with expiry and authenticated
            # (is_authenticated starts False; it flips after the GPS check
            # performed by the gisaid_authorized decorator).
            encoded_api_token = jwt.encode({
                "authn_token": token,
                "is_authenticated": False,
                "last_checked": dt.now(timezone.utc).timestamp()
            }, SECRET_KEY, algorithm="HS256")
            self.write({
                'authn_token': encoded_api_token,
                'authn_url': urllib.parse.urljoin(GPS_AUTHN_URL, token)
            })
        else:
            self.write({}) # I think it may be better to raise a 5xx error
"Markdown",
"TOML",
"JavaScript",
"Python",
"Text"
] | 46 | Markdown | outbreak-info/outbreak.api | b3aa3ca72d3059a28d8171488be4c5f5c2281578 | 22d895512ff43339d0f6d46b488984ddf62f0019 |
refs/heads/master | <file_sep>-- Up
CREATE TABLE config (id INTEGER, url TEXT);
INSERT INTO config (id) VALUES (1);
-- Down
DROP TABLE config;
<file_sep>/* eslint-disable no-unused-expressions */
'use strict';
const chai = require('chai');
const sinon = require('sinon');
const proxyquire = require('proxyquire').noPreserveCache();
const expect = chai.expect;
describe('Call job', () => {
let hubot;
let speakSpy;
let logDetailedErrorSpy;
let message;
let task;
beforeEach(() => {
message = { user: 'hubot', channel: 'myChannel' };
task = { options: { message: 'Hello World' } };
hubot = {
speak () {},
logDetailedError() {}
};
speakSpy = sinon.spy(hubot, 'speak');
logDetailedErrorSpy = sinon.spy(hubot, 'logDetailedError');
});
describe('with correct parameter', () => {
it('job name', () => {
const callJobStub = sinon.stub().resolves();
const callJobSpy = sinon.spy(callJobStub);
const startJob = getStartJob(callJobSpy);
return startJob.handle(hubot, message, task, ['deploy-job']).then(() => {
expect(callJobSpy.calledWith('deploy-job')).to.be.true;
});
});
});
describe('with success', () => {
it('post message with task message', () => {
const callJobStub = sinon.stub().resolves();
const startJob = getStartJob(callJobStub);
return startJob.handle(hubot, message, task, ['deploy-job']).then(() => {
expect(speakSpy.calledWith(message, task.options.message)).to.be.true;
});
});
});
describe('with error', () => {
describe('job does not exists', () => {
it('post message with job not found message', () => {
const callJobStub = sinon.stub().rejects({ notFound: true });
const startJob = getStartJob(callJobStub);
return startJob.handle(hubot, message, task, ['deploy-job']).then(() => {
expect(speakSpy.calledWith(message, 'jenkins:notFoundedJob')).to.be.true;
});
});
});
describe('not known', () => {
it('post message with general error message', () => {
const callJobStub = sinon.stub().rejects({ });
const startJob = getStartJob(callJobStub);
return startJob.handle(hubot, message, task, ['deploy-job']).then(() => {
expect(speakSpy.calledWith(message, 'jenkins:errorOnStartJob')).to.be.true;
});
});
it('and log error', () => {
const error = {};
const callJobStub = sinon.stub().rejects(error);
const startJob = getStartJob(callJobStub);
return startJob.handle(hubot, message, task, ['deploy-job']).then(() => {
expect(logDetailedErrorSpy.calledWith('jenkins:log.error.onCall', error)).to.be.true;
});
});
});
});
function getStartJob(callJobStub) {
return proxyquire('../../src/handlers/start-job', { '../../src/jenkins': { callJob: callJobStub } });
}
});
<file_sep># [v2.0.0](https://github.com/hubot-js/gear-jenkins/releases/tag/2.0.0) (2016-11-29)
### Features
[Project](https://github.com/hubot-js/gear-jenkins/projects/1)
* Gear Jenkins is now configurable by the Slack
# [v1.0.1](https://github.com/hubot-js/gear-jenkins/releases/tag/1.0.1) (2016-08-26)
### Features
* log when unknow error occurs ([38a2343](https://github.com/hubot-js/gear-jenkins/commit/38a2343))
* included crumbissuer option ([85e6773](https://github.com/hubot-js/gear-jenkins/commit/85e6773))
* add MIT license ([3cf1383](https://github.com/hubot-js/gear-jenkins/commit/3cf1383))
* improve readme ([a729921](https://github.com/hubot-js/gear-jenkins/commit/a729921)), close [#1](https://github.com/hubot-js/gear-jenkins/issues/1)
* create changelog file
# [v1.0.0](https://github.com/hubot-js/gear-jenkins/releases/tag/1.0.0) (2016-08-22)
### Features
* Call Jenkins job by name
<file_sep># gear-jenkins
[](https://travis-ci.org/hubot-js/gear-jenkins) [](https://www.npmjs.com/package/gear-jenkins) [](https://coveralls.io/github/hubot-js/gear-jenkins?branch=master) [](https://codeclimate.com/github/hubot-js/gear-jenkins) [](https://david-dm.org/hubot-js/gear-jenkins) [](https://david-dm.org/hubot-js/gear-jenkins?type=dev)
> A Hubot Gear for handle Jenkins tasks
This is a gear to add to [hubot.js](https://github.com/hubot-js/hubot.js) the ability to interact with [Jenkins](https://jenkins.io/). If you do not know the hubot.js or do not know what they are gears like this [click here](https://github.com/hubot-js/hubot.js/blob/master/README.md) for more details.

## Starting
To use this gear you must first activate it with the command:
```
activate jenkins
```

## Configurations
Some settings are required. They can be made by Slack using the following command:
```
configure jenkins
```

These settings are stored, so you just need to do them once. But if necessary can make them again.
Below are the details about each setting:
### Jenkins url
If you use Jenkins without security (authentication) the authorization link is simply the access url. For example: `http://your.jenkins.com:8080`
If you use the Jenkins authentication, you need to find your access token. It can be obtained from `yourJenkinsUrl/me/configure`. See more details [here](https://wiki.jenkins-ci.org/display/JENKINS/Authenticating+scripted+clients). In this case your authorization link should be in this format: `http://your_user:your_token@your_jenkins_url`
## Usage
When hubot.js starts you can call Jenkins of jobs writing the following sentence. Replace "my-deploy" with the name of your job.
```
hubot start job my-deploy
```

## Development setup
- Fork and clone this project
- In the main directory run ```npm install```to install dependencies.
- Write your code. More Jenkins functions can be found [here](https://github.com/silas/node-jenkins).
- To run tests use ```npm test``` command
## Meta
<NAME> - @rluizv - <EMAIL>
Distributed under the MIT license. See [LICENSE](LICENSE) for more information.
https://github.com/hubot-js/gear-jenkins
<file_sep>'use strict';
const Q = require('q');
const db = require('../../src/db');
const request = require('request-promise');
exports.handle = handle;
function handle(hubot, awnser) {
const deferred = Q.defer();
if (awnser === hubot.i18n('jenkins:configuration.skip')) {
deferred.resolve();
return deferred.promise;
}
const url = getUrl(awnser);
const successMessage = 'jenkins:configuration.url.responds';
const errorMessage = 'jenkins:configuration.url.notResponds';
db.getDb().then(database => database.run('UPDATE config SET url = ?', url));
request.get(url)
.then(() => deferred.resolve(successMessage))
.catch(() => deferred.reject(errorMessage));
return deferred.promise;
}
function getUrl(awnser) {
let url = awnser;
if (url.includes('|')) {
url = url.replace('<', '').substring(0, url.indexOf('|') - 1);
} else {
url = url.replace('<', '').replace('>', '');
}
return url;
}
<file_sep>'use strict';
const jenkins = require('../../src/jenkins');
exports.handle = handle;
function handle(hubot, message, task, params) {
return start(hubot, message, task, params[0]);
}
function start(hubot, message, task, job) {
return jenkins.callJob(job).then(() => {
hubot.speak(message, task.options.message);
}, (error) => {
if (error.notFound) {
hubot.speak(message, 'jenkins:notFoundedJob', { job });
} else {
hubot.logDetailedError('jenkins:log.error.onCall', error);
hubot.speak(message, 'jenkins:errorOnStartJob', { job });
}
});
}
<file_sep>/* eslint-disable no-unused-expressions */
'use strict';
require('sinon-as-promised');
const sinon = require('sinon');
const chai = require('chai');
const chaiAsPromised = require('chai-as-promised');
const proxyquire = require('proxyquire').noPreserveCache();
const expect = chai.expect;
chai.use(chaiAsPromised);
const authUrl = 'http://jenkins-test.com/token';
describe('Jenkins call job', () => {
it('with the the correct job name', () => {
const config = { url: authUrl };
const dbStub = getDbStub(config);
const buildStub = sinon.stub().resolves();
const buildSpy = sinon.spy(buildStub);
const jobStub = { build: buildSpy };
const infoStub = sinon.stub().resolves({ useCrumbs: true });
const jenkins = getJenkins(dbStub, jobStub, infoStub);
return jenkins.callJob('build-test').then(() => {
expect(buildSpy.calledWith('build-test')).to.be.true;
});
});
});
function getDbStub(config) {
const db = { get() {} };
sinon.stub(db, 'get').resolves(config);
const dbStub = function buildDbStub() {
return Promise.resolve(db);
};
return dbStub;
}
function getJenkins(dbStub, jobStub, infoStub) {
function jenkinsStub() {
return {
job: jobStub,
info: infoStub
};
}
return proxyquire('../src/jenkins', {
'./db': { getDb: dbStub },
jenkins: jenkinsStub
});
}
<file_sep>'use strict';
const db = require('./db');
exports.callJob = callJob;
function callJob(jobName) {
const p1 = db.getDb();
const p2 = p1.then(getConfig);
const p3 = p2.then(getJenkinsInfo);
return Promise.all([p1, p2, p3]).then(params => build(jobName, params[1], params[2]));
}
function getConfig(dataBase) {
return dataBase.get('SELECT * FROM config');
}
function getJenkinsInfo(config) {
const jenkins = requireJenkins(config.url);
return jenkins.info();
}
function build(jobName, config, info) {
const jenkins = requireJenkins(config.url, info.useCrumbs);
return jenkins.job.build(jobName);
}
function requireJenkins(url, useCrumbs) {
const jenkinsOptions = {
baseUrl: url,
promisify: true,
crumbIssuer: useCrumbs
};
return require('jenkins')(jenkinsOptions);
}
<file_sep>/* eslint-disable no-unused-expressions */
'use strict';
require('sinon-as-promised');
const path = require('path');
const sinon = require('sinon');
const expect = require('chai').expect;
const proxyquire = require('proxyquire').noPreserveCache();
describe('Data base creation', () => {
it('should open db and run migrations scripts', () => {
const database = sinon.stub();
const openStub = sinon.stub();
const migrateStub = sinon.stub().resolves(database);
const openSpy = sinon.spy(openStub);
const migrateSpy = sinon.spy(migrateStub);
const sqlite = { open: openSpy, migrate: migrateSpy };
const db = buildDb(sqlite);
openStub.resolves(sqlite);
db.getDb();
const basePath = path.join(process.env.HOME, 'hubot.js', 'data');
expect(openSpy.calledWith(`${basePath}/gear-jenkins.db`)).to.be.true;
migrateSpy().then(() => {
expect(migrateSpy.calledWithMatch(
{ migrationsPath: `${basePath}/migrations` })).to.be.true;
});
});
it('do nothing when error occurs on open', () => {
const openStub = sinon.stub().rejects();
const openSpy = sinon.spy(openStub);
const migrateSpy = sinon.spy();
const sqlite = { open: openSpy, migrate: migrateSpy };
const db = buildDb(sqlite);
db.getDb();
const basePath = path.join(process.env.HOME, 'hubot.js', 'data');
expect(openSpy.calledWith(`${basePath}/gear-jenkins.db`)).to.be.true;
expect(migrateSpy.calledWithMatch(
{ migrationsPath: `${basePath}/migrations` })).to.be.false;
});
it('do nothing when error occurs on migration', () => {
const openStub = sinon.stub();
const migrateStub = sinon.stub().rejects();
const openSpy = sinon.spy(openStub);
const migrateSpy = sinon.spy(migrateStub);
const sqlite = { open: openSpy, migrate: migrateSpy };
const db = buildDb(sqlite);
openStub.resolves(sqlite);
db.getDb();
});
});
describe('Get data base', () => {
it('when call getDb', () => {
const database = sinon.stub();
const openStub = sinon.stub();
const migrateStub = sinon.stub().resolves(database);
const openSpy = sinon.spy(openStub);
const migrateSpy = sinon.spy(migrateStub);
const sqlite = { open: openSpy, migrate: migrateSpy };
const db = buildDb(sqlite);
openStub.resolves(sqlite);
return db.getDb().then((result) => {
expect(result).to.be.deep.equal(database);
});
});
});
describe('Get opened data base', () => {
it('when already exist a opened database', () => {
const database = sinon.stub();
const openStub = sinon.stub();
const migrateStub = sinon.stub().resolves(database);
const openSpy = sinon.spy(openStub);
const migrateSpy = sinon.spy(migrateStub);
const sqlite = { open: openSpy, migrate: migrateSpy };
const db = buildDb(sqlite);
openStub.resolves(sqlite);
return db.getDb().then(() => db.getDb()).then(() => {
expect(openSpy.calledOnce).to.be.true;
});
});
});
// Load the db module under test with the supplied sqlite double injected
// in place of the real `sqlite` dependency.
function buildDb(sqlite) {
  const stubs = { sqlite };
  return proxyquire('../src/db', stubs);
}
| 9081d8e8fa2b6316dd456cb0185e5054205efc3c | [
"JavaScript",
"SQL",
"Markdown"
] | 9 | SQL | hubot-js/gear-jenkins | 0c0977d95d769c0857f10800e75fd4e674bd5755 | fdb081cda73dde689ea6b4ea25ccf9f8786f7931 |
refs/heads/master | <file_sep>#!/bin/bash
# Resolve the most recent git tag to use as the Docker image version.
VERSION=$(git describe --tags --abbrev=0)
# Build the OAuth API image from the local Dockerfile, tagged with that version.
docker build -t terrabrasilis/terrabrasilis-oauth-api:$VERSION -f Dockerfile .
# Publish the tagged image to Docker Hub.
docker push terrabrasilis/terrabrasilis-oauth-api:$VERSION
<file_sep>
const Service = {
  /**
   * Build a validation-result descriptor for a decoded JWT and a requested
   * resource. Checks only that the token has not expired and (via
   * validateUserPermissionToAction) that the resource is allowed.
   * @param {*} jwtUser Decoded JWT payload; `exp`/`iat` are epoch seconds
   *                    (times issued by the Python OAuth server).
   * @param {*} resource Name of the resource the caller wants to access.
   * @returns A user descriptor with `authenticated` set, plus `error` when
   *          validation fails. When `jwtUser` is falsy the default
   *          (unauthenticated) descriptor is returned unchanged.
   */
  validate(jwtUser, resource) {
    // Default descriptor: unauthenticated, no dates, requested resource recorded.
    var user={
      expirationDate : null,
      issuedAtDate : null,
      id : null,
      requestedResource : resource,
      accessType : null,
      accessName : null,
      accessAction : null,
      authenticated : false,
      token : ''
    }
    if(jwtUser)
    {
      // exp/iat are in seconds; the Date constructor expects milliseconds.
      user.expirationDate = new Date(jwtUser.exp * 1000);
      user.issuedAtDate = new Date(jwtUser.iat * 1000);
      user.id = jwtUser.user_id;
      user.requestedResource = resource;
      // NOTE(review): assumes `access` is a non-empty array whose first entry
      // has at least one action; a token without it would throw here —
      // confirm the OAuth server always includes it.
      user.accessType = jwtUser.access[0].type;
      user.accessName = jwtUser.access[0].name;
      user.accessAction = jwtUser.access[0].actions[0];
      user.authenticated = false;
      user.token = '';
      var currentDate = new Date();
      // Comparison in milliseconds.
      if (currentDate.getTime() < user.expirationDate.getTime()) {
        if (this.validateUserPermissionToAction(user)) {
          user.authenticated = true;
        }
        else {
          user.error = "The requested resource is not available for this authentication session.";
        }
      }
      else {
        user.error = "The requested authentication session has expired.";
      }
    }
    return user;
  },
  /**
   * Check whether the user may act on the requested resource.
   * @param {*} user Descriptor produced by validate().
   * @returns {boolean} Always true for now (see TODO below).
   */
  validateUserPermissionToAction(user)
  {
    /*
      TODO: permission validation is not needed for this group of
      applications yet — an authenticated (non-expired) user is permitted.
    */
    return true;
  },
  /**
   * Log the outcome of a token validation to the console (audit/debug aid).
   * @param {*} user Descriptor produced by validate().
   */
  logAccess(user)
  {
    console.log("--- User validation ---");
    console.log("{");
    console.log(" User id: " + user.id);
    console.log(" Access name: " + user.accessName);
    console.log(" Access type: " + user.accessType);
    console.log(" Expiration: " + user.expirationDate);
    console.log(" Expiration Issued At: " + user.issuedAtDate);
    console.log(" Actions: " + user.accessAction);
    console.log(" Authenticated: " + user.authenticated);
    console.log(" Error message: " + user.error);
    console.log(" Requested Resource: " + user.requestedResource);
    console.log("}");
  }
}

export default Service
<file_sep>import { index } from './validate.controller'
import router from 'koa-router'

// Token-validation routes: the same handler serves GET and POST so clients
// can validate either way. `:resource` names the protected resource the
// token is being checked against.
const validate = router()

validate.get('/:resource', index)
validate.post('/:resource', index)

export default validate
<file_sep>
// Placeholder for shared constant definitions; intentionally empty for now.
export class Constants
{
}

export default Constants
<file_sep>import health from './health';
import validate from './validate';
import getoauthjs from './getoauthjs';
export {health, validate, getoauthjs}<file_sep>## NODE VERSION
You should use Node.js version >= 12
## Installation
```sh
$ yarn install
```
## Usage
# Deploy Info
### Build and Run your image
From your Node.js app project folder launch those commands:
```bash
$ docker build -t terrabrasilis-oauth-api .
$ docker run -p 9000:9000 terrabrasilis-oauth-api
```
### Pushing to Docker Hub:
```bash
$ docker build -t terrabrasilis-oauth-api .
$ docker images # look and find the hash you want
$ docker tag local-image:tagname reponame:tagname
$ docker push reponame:tagname
# EXAMPLE:
$ docker tag bfe06c2b5dea terrabrasilis/terrabrasilis-oauth-api:v1.0.0
$ docker push terrabrasilis/terrabrasilis-oauth-api
```
## Useful commands
Command | Description
--------|------------
```$ docker exec -it <container-id> pm2 monit``` | Monitoring CPU/Usage of each process
```$ docker exec -it <container-id> pm2 list``` | Listing managed processes
```$ docker exec -it <container-id> pm2 show``` | Get more information about a process
```$ docker exec -it <container-id> pm2 reload all``` | 0sec downtime reload all applications
```$ docker exec -it <container-id> pm2 logs --format``` | see all applications logs
```$ docker exec -it <container-id> pm2 flush``` | flush applications logs
<file_sep>var Authentication = {
oauthBaseURL: "http://oauth.dpi.inpe.br",
oauthApiURL: "http://oauth.dpi.inpe.br/api",
tokenKey: "oauth.obt.inpe.br",
service: "terrabrasilis",
scope: "portal:dash:admin",
expiredKey: "expired_token",
usedInfoKey: "user_info",
usedDataKey: "user",
loginStatusChangedCallback: null,
internalValidationOauthApiURL: "/oauth-api/",
expirationGuardInterval: null,
validationData: null,
validationInterval: 300000,
ul2append: "#navigationBarUL",
init(language, loginStatusChanged, serverURL)
{
if(serverURL) this.serverURL=serverURL;
else this.serverURL=this.internalValidationOauthApiURL;
this.loginStatusChangedCallback=loginStatusChanged;
AuthenticationTranslation.init(language);
this.buildLoginDropdownMenu();
this.addLoginCss();
this.equalizeStorageAndCookieToken();
if(this.hasToken())
{
this.validateToken(this.getToken());
}
},
initCustom(language, loginStatusChanged, customUl2append)
{
if(customUl2append)
{
this.ul2append = customUl2append;
}
this.init(language, loginStatusChanged);
},
showAuthenticationModal() {
//Verify if authentication modal already exists
if($('#authentication-div').length>0)
{
$('#authentication-div').remove();
}
//Authentication div
var authenticationDiv = $('<div>', {
id: 'authentication-div',
frameborder: 0,
class: "modal-auth",
tabindex: "-1",
role: "dialog"
});
authenticationDiv.appendTo("body");
//Modal div
var modalLoginDiv = $('<div>',
{
id: "modal-login",
class: "modal-auth-dialog modal-auth-dialog-centered ",
style: "min-width: 500px;"
});
$('#authentication-div').append(modalLoginDiv);
//Modal Content div
var modalContentDiv = $('<div>',
{
class: "modal-auth-content box-login"
});
modalLoginDiv.append(modalContentDiv);
//Modal Header Div
var modalCloseButton = '<span class="close">×</span>';
var modalHeaderDiv = $('<div>',
{
class: "modal-auth-header",
html: modalCloseButton
});
modalContentDiv.append(modalHeaderDiv);
//Modal Body Div
var modalBodyDiv = $('<div>',
{
class: "modal-body",
id: "authentication-body"
});
modalContentDiv.append(modalBodyDiv);
//Box login form logo Div
var modalBoxLoginFormLogo = $('<div>',
{
class: "box-login-form-logo"
});
modalBoxLoginFormLogo.append('<img src="'+Authentication.serverURL+'images/logo-terrabrasilis.png" alt="logo">');
modalBodyDiv.append(modalBoxLoginFormLogo);
modalBodyDiv.append('<span class="box-login-form-title">TerraBrasilis</span>');
//Modal Footer Div
// Login box form
var loginBoxForm = $('<div>',
{
class: 'box-form'
});
modalBodyDiv.append(loginBoxForm);
//Login input div
var loginInputDiv = $('<div>',{
class:"wrap-box-input validate-input"
});
loginInputDiv.attr('data-validate', AuthenticationTranslation.getTranslated('username-validation'));
loginInputDiv.append('<i class="material-icons">person</i>');
loginInputDiv.append('<input class="box-input" type="text" name="username" placeholder="'+AuthenticationTranslation.getTranslated('username')+'" id="username-input">');
loginInputDiv.append('<span class="focus-box-input" data-placeholder=""></span>');
loginBoxForm.append(loginInputDiv);
//Login password div
var loginPasswordDiv = $('<div>',{
class:"wrap-box-input validate-input"
});
loginPasswordDiv.attr('data-validate', AuthenticationTranslation.getTranslated('password-validation'));
loginPasswordDiv.append('<i class="material-icons">lock</i>');
loginPasswordDiv.append('<input class="box-input" type="<PASSWORD>" name="pass" placeholder="'+AuthenticationTranslation.getTranslated('password')+'" id="password-input">');
loginPasswordDiv.append('<span class="focus-box-input" data-placeholder=""></span>');
loginBoxForm.append(loginPasswordDiv);
var btnContainerDiv = $('<div>',
{
class:"container-box-login-form-btn"
});
btnContainerDiv.append('<button type="button" id="login-button" onclick="Authentication.validateLogin()" class="box-login-form-btn">'+AuthenticationTranslation.getTranslated('submitLogin')+'</button>');
loginBoxForm.append(btnContainerDiv);
var loginFormAlert = $('<div>',
{
class: 'alert alert-danger alert-dismissible show align-middle justify-content-center',
role: 'alert',
id:'loginAlert'
});
loginFormAlert.hide();
modalBodyDiv.append(loginFormAlert);
$(function(){
//Login when pressing enter
$('#password-input').keypress(function(e){
if(e.which == 13) {
$('#login-button').click();
}
})
});
this.showAuhenticationDiv(true);
//$('#authentication-div').modal('show');
},
showWarningDiv(show)
{
if(show==true)
{
var authenticationDiv = $("#modal-container-warning");
authenticationDiv.css("display","block");
}
else
{
var authenticationDiv = $("#modal-container-warning");
authenticationDiv.css("display","none");
}
},
showAuhenticationDiv(show)
{
if(show==true)
{
var authenticationDiv = $("#authentication-div");
authenticationDiv.css("display","block");
$('body').css("overflow-y", "hidden");
$(".close").on("click",function()
{
Authentication.showAuhenticationDiv(false);
});
}
else
{
$('body').css("overflow-y", "");
var authenticationDiv = $("#authentication-div");
authenticationDiv.css("display","none");
}
},
validateLogin()
{
let user = $('#username-input').val();
let pass = $('#password-input').val();
if (user==="" || pass==="")
{
Authentication.handleError(AuthenticationTranslation.getTranslated('missing-user-pass'));
}
else
{
Authentication.login(user, pass);
}
},
validateToken(userToken)
{
$.ajax(this.internalValidationOauthApiURL + "validate/" + this.service, {
type: "GET",
dataType: 'json',
headers: {
"Authorization": "Bearer " + userToken
},
contentType: "application/json",
}).done(function (data) {
console.log("User authentication token is valid");
Authentication.validationData = data;
Authentication.configureExpirationGuard();
}).fail(function (xhr, status, error) {
console.log("User authentication token is invalid, logging out...");
Authentication.logout();
});
},
handleError(message)
{
$('#loginAlert').html(message);
$("#loginAlert").fadeTo(2000, 500).slideUp(500, function() {
$("#loginAlert").slideUp(500);
});
},
login(user, pass) {
$.ajax(this.oauthApiURL + "/oauth/auth/login", {
type: "POST",
dataType: 'json',
data: '{ "username": "' + user + '","password": "' + <PASSWORD> + '" }',
contentType: "application/json",
}).done(function (data) {
Authentication.setUserData(JSON.stringify(data));
Authentication.loadAppToken(data.access_token);
}).fail(function (xhr, status, error) {
console.log("Could not reach the API to authenticate the user: " + error);
Authentication.handleError(AuthenticationTranslation.getTranslated('authenticationFailed'));
});
},
loadUserInfo(userId, userToken) {
$.ajax(this.oauthApiURL + "/oauth/users/" + userId, {
type: "GET",
dataType: 'json',
headers: {
"Authorization": "Bearer " + userToken
},
contentType: "application/json",
}).done(function (data) {
Authentication.setUserInfo(JSON.stringify(data));
//$('#authentication-div').modal('hide');
Authentication.showAuhenticationDiv(false);
Authentication.buildLoginDropdownMenu();
Authentication.removeExpiredToken();
}).fail(function (xhr, status, error) {
console.log("Could not reach the API to obtain the user info: " + error);
Authentication.logout();
});
},
loadAppToken(userToken) {
$.ajax(this.oauthApiURL + "/oauth/auth/token?service=" + this.service + "&scope=" + this.scope, {
type: "GET",
dataType: 'json',
headers: {
"Authorization": "Bearer " + userToken
},
contentType: "application/json",
}).done(function (data) {
var myToken = data.access_token;
Authentication.setToken(myToken);
Authentication.loadUserInfo(data.user_id, userToken);
Authentication.validateToken(myToken);
Authentication.loginStatusChanged();
return true;
}).fail(function (xhr, status, error) {
console.log("Could not reach the API to obtain App Token: " + error);
// $('#modal-container-warning').modal('show');
this.showWarningDiv(true);
return false;
});
},
dropUser() {
if(confirm(AuthenticationTranslation.getTranslated('drop-user-confirm'))) {
let dataUser=Authentication.getUserData();
$.ajax(this.oauthApiURL + "/oauth/users/" + dataUser.user_id, {
type: "DELETE",
dataType: 'json',
headers: {
"Authorization": "Bearer " + dataUser.access_token
},
contentType: "application/json",
}).done(function (data) {
Authentication.logout();
alert(AuthenticationTranslation.getTranslated('drop-user-ok'));
}).fail(function (xhr, status, error) {
console.log("Could not reach the API to delete the user: " + error);
Authentication.logout();
alert(AuthenticationTranslation.getTranslated('drop-user-fail'));
});
}
},
setToken(value) {
this.setKey(this.tokenKey, value);
this.setCookie(this.tokenKey, value, 1);
},
  /**
   * Store a value in localStorage under the given key.
   * Passing null or undefined removes the key instead.
   *
   * @param {string} key The name of the key
   * @param {any} value The value to store (key removed when null/undefined)
   */
  setKey(key, value) {
    if (localStorage) {
      if (value === undefined || value === null) {
        localStorage.removeItem(key);
      } else {
        localStorage.setItem(key, value);
      }
    } else {
      // Browsers without Web Storage: nothing we can do but log.
      console.log("Sorry! No Web Storage support..");
    }
  },
setUserInfo(value) {
this.setKey(this.usedInfoKey, value);
},
setUserData(value){
this.setKey(this.usedDataKey, value);
},
getUserData(){
return JSON.parse(this.getValueByKey(this.usedDataKey));
},
removeUserData(){
this.setKey(this.usedDataKey, null);
},
  /**
   * Clear every trace of the current session: stored token, cached user
   * info/data, the expiry flag, the token cookie and the periodic
   * expiration guard. Rebuilds the login menu and notifies listeners.
   */
  logout() {
    this.removeToken();
    this.removeUserInfo();
    this.removeUserData();
    this.removeExpiredToken();
    this.buildLoginDropdownMenu();
    this.loginStatusChanged();
    this.eraseCookie(this.tokenKey);
    // Stop the periodic expiration check, if one was scheduled.
    if(this.expirationGuardInterval!=null)
    {
      clearInterval(this.expirationGuardInterval);
      this.expirationGuardInterval=null;
    }
    this.validationData=null;
  },
removeUserInfo() {
this.setKey(this.usedInfoKey, null);
},
removeToken() {
this.setKey(this.tokenKey, null);
},
removeExpiredToken() {
this.setKey(this.expiredKey, null);
},
isExpiredToken() {
return this.getValueByKey(this.expiredKey)==="true";
},
/**
* Functions attach the login navigation item to a default toolbar (Also checks if the user is logged in and build the login dropdown menu)
*/
buildLoginDropdownMenu() {
//Verifiy if login LI already exists
let li = $('#login-li');
if (li && li.length > 0) {
li.empty();
}
else {
li = $('<li>',
{
class: "nav-item dropdown-auth",
id: "login-li"
});
}
let a = $('<a/>',
{
class: "nav-link dropdown-toggle",
id: "navbarDropdownLoginLink",
role: "button"
});
a.attr("aria-expanded", "true");
a.attr("aria-haspopup", "true");
a.attr("data-toggle", "dropdown");
a.attr("href", "#");
let userImageUrl = '';
if (this.hasToken())
{
userImageUrl = Authentication.serverURL+'images/user-logado.png';
}
else
{
userImageUrl = Authentication.serverURL+'images/user-deslogado.png';
}
//Adding image logged or unlogged
let imagetag = '<img id="login" style="width:28px; height:28px; border-radius:50%" alt="Login" src="'+userImageUrl+'" title="Autentique">';
a.append('<i class="material-icons iconmobile">assignment </i><span id="maps-sup">'+imagetag+'</span>');
a.appendTo(li);
let dropDownDiv = $('<div/>',
{
id: "navbarDropdownLoginPopup",
class: "submenu dropdown-auth-content",
style: ""
});
dropDownDiv.attr("aria-labelledby", "navbarDropdownLoginLink");
dropDownDiv.appendTo(li);
if (this.hasToken()) {
let info = this.getUserInfo();
$('<a/>',
{
class: 'dropdown-auth-item',
html: '<b style="color:#6c757d;">' + info.name + ' / ' + info.institution + '</b>'
}).appendTo(dropDownDiv);
let a = $('<a/>',
{
class: 'dropdown-auth-item',
html: '<span >'+AuthenticationTranslation.getTranslated('change-pass')+'</span>'
});
a.attr("href", this.oauthBaseURL);
a.appendTo(dropDownDiv);
a = $('<a/>',
{
class: 'dropdown-auth-item',
html: '<span >'+AuthenticationTranslation.getTranslated('drop-user')+'</span>'
});
a.attr("href", "javascript:Authentication.dropUser();");
a.appendTo(dropDownDiv);
a = $('<a/>',
{
class: 'dropdown-auth-item',
html: '<span >'+AuthenticationTranslation.getTranslated('logout')+'</span>'
});
a.attr("href", "javascript:Authentication.logout();");
a.appendTo(dropDownDiv);
}
else {
let a = $('<a/>',
{
class: 'dropdown-auth-item',
html: '<span >'+AuthenticationTranslation.getTranslated('login')+'</span>'
});
a.attr("href", "javascript:Authentication.showAuthenticationModal();");
a.appendTo(dropDownDiv);
a = $('<a/>',
{
class: 'dropdown-auth-item',
html: '<span >'+AuthenticationTranslation.getTranslated('reset-pass')+'</span>'
});
a.attr("href", this.oauthBaseURL);
a.appendTo(dropDownDiv);
}
//Appending to navigation menu default UL
$(this.ul2append).append(li);
},
  // True-ish when a non-empty token is stored (note: returns the raw falsy
  // value, e.g. null, rather than a strict boolean).
  hasToken() {
    var token = this.getValueByKey(this.tokenKey);
    return (token && token != "");
  },
  // Raw token string from localStorage, or null when absent.
  getToken() {
    return this.getValueByKey(this.tokenKey);
  },
  // localStorage lookup returning null when the key is absent or Web
  // Storage is unavailable.
  getValueByKey(key) {
    var value = null;
    if (localStorage) {
      value = localStorage.getItem(key);
    } else {
      console.log("Sorry! No Web Storage support..");
    }
    return value;
  },
getUserInfo() {
return JSON.parse(this.getValueByKey(this.usedInfoKey));
},
loginStatusChanged()
{
if(this.loginStatusChangedCallback!=null)
{
this.loginStatusChangedCallback();
}
},
setLoginStatusChangedCallback(callback)
{
this.loginStatusChangedCallback = callback;
},
setExpiredToken(state) {
this.setKey(this.expiredKey,state);
},
addLoginCss()
{
$('head').append('<link rel="stylesheet" type="text/css" href="'+Authentication.serverURL+'css/login.css" />');
},
setCookie(name, value, days) {
document.cookie = name + "=" + (value || "") + "; path=/";
},
getCookie(name) {
var nameEQ = name + "=";
var ca = document.cookie.split(';');
for (var i = 0; i < ca.length; i++) {
var c = ca[i];
while (c.charAt(0) == ' ') c = c.substring(1, c.length);
if (c.indexOf(nameEQ) == 0) return c.substring(nameEQ.length, c.length);
}
return null;
},
eraseCookie(name) {
document.cookie = name + '=; path=/';
let cookie = document.cookie;
cookie
},
/**
* This method equalizes storage token status with cookies. This is to be able to add from token to cookie if it already exists on LocalStorage
* Now setToken always sets on Cookie also.
* This equalization will run only when the application has inited this api and has a LocalStorage Token Key and not and Cookie Token key
*/
equalizeStorageAndCookieToken()
{
if(this.hasToken())
{
let cookie = this.getCookie(this.tokenKey);
if(cookie==null)
{
this.setToken(this.getToken());
}
}
else
{
this.eraseCookie(this.tokenKey);
}
},
/**
* This functions configure an interval to check if the authentication token is still valid for the current session
*/
configureExpirationGuard()
{
if(this.hasToken()
&& this.validationData
&& this.validationData.authenticated==true)
{
this.expirationGuardInterval = setInterval(this.expirationCheck,this.validationInterval);
}
else
{
this.logout();
}
},
  /**
   * Check whether the current session's authentication token has expired;
   * force a logout when it has (or when there is no valid session at all).
   * Returns true if expired / logged out, false when still valid.
   * Note: references the Authentication global rather than `this` because
   * it is invoked from setInterval, where `this` is not the object.
   */
  expirationCheck()
  {
    if(Authentication.hasToken()
      && Authentication.validationData
      && Authentication.validationData.authenticated==true)
    {
      var now = new Date();
      var expiration = new Date(Authentication.validationData.expirationDate);
      if(now>expiration)
      {
        Authentication.logout();
        return true;
      }
    }
    else
    {
      // No token or no successful validation on record: treat as expired.
      Authentication.logout();
      return true;
    }
    return false;
  }
}
$(document).ready(function () {
// Authentication.init();
});
/**
* Authentication translation functions and data
*/
var AuthenticationTranslation = {
currentLanguage: 'pt-br',
init(lang)
{
this.currentLanguage = lang;
console.log("Initializating authentication-oauth-api translation in:" + this.currentLanguage);
},
  // Look up the translated string for `key` in the active language.
  // NOTE(review): returns undefined for unknown keys/languages — callers
  // appear to rely on every key existing in both tables; confirm.
  getTranslated(key)
  {
    return this.translationJson[this.currentLanguage][key];
  },
translationJson:
{
'pt-br':
{
'authenticationFailed':'O nome de usuário ou senha está incorreto. Verifique se CAPS LOCK está ativado. Se você receber essa mensagem novamente, entre em contato com o administrador do sistema para garantir que você tenha permissão para logar no portal.',
'submitLogin':"Entrar",
'submitCancel':"Cancelar",
'username':"Usuário",
'password':"Senha",
'logout':"Sair",
'login':"Entrar",
'reset-pass':'<PASSWORD>',
'change-pass':'<PASSWORD>ar senha',
'drop-user':'Remover conta de usuário',
'drop-user-ok':'Conta removida.',
'drop-user-fail':'Falhou ao remover a conta.',
'drop-user-confirm':'A conta será removida permanentemente. Confirma?',
'missing-user-pass':"Usuário ou senha não foram preenchidos!",
'username-validation':"Entre com o usuário",
'password-validation':"Entre com a senha"
},
'en':
{
'authenticationFailed':'The username or password is wrong. Verify if CAPS LOCK is enable. If you receive this message again, please contact the system administrator to ensure that you have permission to login in portal.',
'submitLogin':"Login",
'submitCancel':"Cancel",
'username':"Username",
'password':"<PASSWORD>",
'logout':"Logout",
'login':"Login",
'reset-pass':'<PASSWORD>',
'change-pass':'Change the password',
'drop-user':'Remove user account',
'drop-user-ok':'User account removed',
'drop-user-fail':'Failed to remove account',
'drop-user-confirm':'The account will be permanently removed. Do you confirm?',
'missing-user-pass':"Missing username or password!",
'username-validation':"Insert an username",
'password-validation':"Insert a password"
}
},
changeLanguage(lang)
{
this.currentLanguage = lang;
Authentication.buildLoginDropdownMenu();
}
}
var AuthenticationService = {
downloadFile(url, startDownloadCallback, doneDownloadCallback)
{
let anchor = document.createElement("a");
document.body.appendChild(anchor);
var bearer=null;
if(Authentication.hasToken())
{
//Check if token is expired or not. If is expired it will logout
if(Authentication.expirationCheck())
{
window.location.reload();
return;
}
bearer = "Bearer " + Authentication.getToken();
}
//Invokink callback, application should show loading or block button
if(startDownloadCallback)
{
startDownloadCallback();
}
var xhr=new XMLHttpRequest();
xhr.open("GET", url, true);
xhr.responseType = 'arraybuffer';
//Adding authorization if token is present
if(bearer!=null)
{
xhr.setRequestHeader("Authorization", bearer);
}
xhr.addEventListener('load',function()
{
if (xhr.status === 200){
var arrayBuffer = xhr.response;
var filename = AuthenticationService.getFilenameFromContentDisposition(xhr);
AuthenticationService.saveByteArray(filename, arrayBuffer);
//Invokink callback, application should hide loading or enable button
if(doneDownloadCallback)
{
doneDownloadCallback();
}
}
})
xhr.send();
},
getFilenameFromContentDisposition(xhr)
{
var filename = "";
var disposition = xhr.getResponseHeader('Content-Disposition');
if (disposition && disposition.indexOf('filename') !== -1) {
if(disposition.split("=").length==2)
{
filename=disposition.split("=")[1];
}
}
return filename;
},
saveByteArray(filename, byte) {
var blob = new Blob([byte], {type: "octet/stream"});
var link = document.createElement('a');
link.href = window.URL.createObjectURL(blob);
if(filename!="")
{
link.download = filename;
}
link.click();
},
  // Whether a token is currently stored for the session.
  isAuthenticated()
  {
    return Authentication.hasToken();
  },
  // "Bearer <token>" header value, or "" when not authenticated.
  getBearer()
  {
    var bearer="";
    if(this.isAuthenticated())
    {
      bearer = "Bearer " + Authentication.getToken();
    }
    return bearer;
  },
  // Raw token, or "" when not authenticated.
  getToken()
  {
    var token="";
    if(this.isAuthenticated())
    {
      token = Authentication.getToken();
    }
    return token;
  }
}
| c743af0d3c67f4e01b0766ef7e1b28285c618ad5 | [
"JavaScript",
"Markdown",
"Shell"
] | 7 | Shell | terrabrasilis/terrabrasilis-oauth-api | 944d4a56edb4f53a6911edd819c90c30faa21152 | 2cd162d8ef84207dbbfcdeef41cfec3a8bd40805 |
refs/heads/master | <file_sep>+++
author = "wheatdog"
comments = true
date = "2015-11-09T21:51:17+08:00"
draft = false
image = ""
menu = ""
share = true
slug = "make-a-professional-game"
tags = ["wastale"]
title = "Make a professional game"
+++
[<NAME>](http://mollyrocket.com/casey/about.html) launched a brilliant
project [Handmade Hero](http://handmadehero.org/) last year. I still remember
how excited I was when I watched the very first episode of Handmade Hero. After
following 70 episodes, I start thinking what I could use from what I learned. I
am not saying I will stop following this project or I have leaned enough from
it. I just mean that I am the kind of person who learns more by doing. Therefore,
I decide to make my own game.
# Why now?
I am a college student right now. Basically, I can say this period of time is the
most free time in my whole life. I can learn whatever I want and do whatever I
want. One reason why I choose computer science as my college major is my passion
for indie games. Indie games truly rock my soul. I saw something very unique in
them. Unlike AAA games, I can feel real "connect" between players and game
designers. That definitely is what I want to do in the future.
# What genre will this game be?
I love puzzle games like [Braid](http://store.steampowered.com/app/26800/) and
roguelike games like
[The Binding of Isaac](http://store.steampowered.com/app/113200/). As my first
game, this game might be a 2D roguelike game. When I was a junior high school
student, I really enjoyed the excitement of fighting huge enemies in Monster
Hunter but sucked at the feature of collecting materials for forging weapons. In
this game, I will get rid of the concept of collecting and focus on the weapon
system and huge enemies. I will talk about them in the later posts.
# Goal
* Finish this game before I graduate.
* Ship on Windows and Linux at least since I only have machines with these
operation systems now.
# Keep going on
Actions speak louder than words. Procrastination is the horrible enemy. I will
post something new about Wastale at least every two weeks. You have my word.
<file_sep>+++
author = "wheatdog"
comments = true
date = "2015-12-03T22:51:19+08:00"
draft = false
image = ""
menu = ""
share = true
slug = "devember-day-3"
tags = ["devember", "wastale"]
title = "Devember Day 3 - Basic MultiPlayer Movement"
+++
Today's code is on
[this tag](https://github.com/wheatdog/wastale/tree/devember_3). The complete
code can be found in [here](https://github.com/wheatdog/wastale).
First, Let's draw some rectangle on the screen. You can check out `DrawRect`
function. Be careful that the coordinates of angles in the rectangle might sit
outside of the screen. This might cause to memory access violation.

Then, it is about physic of motion.


Also, change the unit from pixel to meter. You can feel that motion is much better.

Each entry of `player_info` struct array store the information of each player.
Now, we have multiplayer.

See you tomorrow!
<file_sep>BaseUrl = "http://stringbulbs.com/"
LanguageCode = "en-us"
Title = "String Bulbs"
paginate = 5
Copyright = "All rights reserved - 2015"
canonifyurls = true
pygmentsuseclasses = true
[params]
# Optional RSS-Link, if not provided it defaults to the standard index.xml
# set true if you are not proud of using Hugo (true will hide the footer note "Proudly published with HUGO.....")
logo = "images/logo.png"
cover = "images/bulb.jpg"
hideHUGOSupport = false
[permalinks]
post = "/post/:slug/"
[[menu.main]]
name = "Home"
weight = -120
url = "/"
[[menu.main]]
name = "<NAME>"
weight = 100
pre = "<h3>Team</h3>"
[[menu.main]]
name = "Venses Tab"
weight = 101
url = "/the-beginning-of-tab/"
[[menu.main]]
name = "Wastale"
weight = 110
pre = "<h3>Project</h3>"
url = "/tags/wastale/"
<file_sep>+++
author = "wheatdog"
comments = true
date = "2015-12-05T22:57:35+08:00"
draft = false
image = ""
menu = ""
share = true
slug = "devember-day-5"
tags = ["devember", "wastale"]
title = "Devember Day 5 - Collision Movement Search"
+++
Today's code is on
[this tag](https://github.com/wheatdog/wastale/tree/devember_5). The complete
code can be found in [here](https://github.com/wheatdog/wastale).
Yesterday, I implement the GJK algorithm to determine whether two rectangles are
overlapped or not. With this property, we can figure out which entities will
collide in this frame. If they does collide, "Collision Movement Search" will help
us find the right place to put the entity. You can learn a lot from
[Geometric vs. Temporal Movement Search](https://hero.handmadedev.org/videos/game-architecture/day045.html).
In video, Casey mention two method - "Search in P" and "Search in T".
I choose "Search in T" with a bound number of iteration for now.

The way I clip the entity movement contain two steps:
1. Transform fixed rectangle to the Minkowski sum of it and the movable
rectangle. And then transform the movable rectangle to a point.
2. Find the intersection point of the line segment, whose start point is the
point we just created and the direction is as same as the motion's, and the
sides of Minkowski sum rectangle.
There are some TODOs here,
1. Make the collision routine support different shapes. I will not deal with
this issue until I make the real game engine.
2. Something strange happens when two movable entities collide. I will take a look tomorrow.
That's it!
<file_sep>+++
author = "tab"
comments = true
date = "2015-11-11T00:00:10+08:00"
draft = false
image = ""
menu = ""
slug = "the-beginning-of-tab"
share = true
title = "The beginning of Tab"
+++
# Introduction
Hi, this is Venses Tab. When I was in junior high school, I started listening
electronic music. At that time, I quite enjoyed the music taking me floating
above mind sea. My mp3 was used to load lots of industrial-loud electronic noise,
you should wonder that why a boy had this strange passion on such noise in his
early life. You know what, because the beats are truly epic! Until now, I still
love plugging earphone, wandering in digital labyrinth.
# Play around
In the beginning of my college's life, I ask myself, "How about I compose my own
music?". Thanks to the prosperity of clubs in my college, there is a club
teaching me composing digital music using
[Ableton Live](https://en.wikipedia.org/wiki/Ableton-Live), and also I learn
some DJ skills. When time goes on, the laziness of attitude and loads of
assignment let me often get tired of practicing.
# Motivation
Luckily, Tim and I found Stringbulbs. We have lots of memories of thinking game
ideas when we were little. This time, in November, we launch a new project for
our first game. In this team, Tim is as programmer, and I am as soundtrack
producer, maybe sometimes help Tim program a little bit, ugh..., all I want to
say is I hope this work can motivate me to find my lost passion about music
composition.
# Goal?
Before 2015 ends, I promise I will finish a set for DJ performance and one
experimental music soundtrack. Also, in leisure time, I will post some my
opinion and thought about music and project here, hope you guys have a nice
day!
<file_sep>+++
author = "wheatdog"
comments = true
date = "2015-11-14T13:40:04+08:00"
draft = false
image = ""
menu = ""
share = true
slug = "devember-challenge"
tags = ["devember", "wastale"]
title = "The Devember challenge"
+++
I, <NAME>, will participate to the next Devember. My Devember will be
programming a part of Wastale, a game project. I promise I will program for my
Devember for at least an hour, every day of the next December. I will also write
a daily public devlog and will make the produced code publicly available on the
internet. No matter what, I will keep my promise.
The content above is the contract of [Devember](http://devember.org/), a really
interesting challenge in December.
# Detail
Wastale is a developing game inspired by
[Handmade Hero](http://handmadehero.org/). This game will focus on big, various
enemies and different kinds of weapons. I hope this game can give players
hunting excitement but not in mission-based. There are still lots of game design
decisions need to make. Obviously, I don't have to worry about them now. In
December, I will first build up the minimum windows platform layer just like
Handmade Hero did. Then, I will implement some basic stuff for a platformer
game.
# Goals
Therefore, following are my Devember goals.
* Basic Windows platform layer
* World/Map handling
* Entity handling
# DJ! Play the music
I really love the idea of Devember. Let's start the party.
<file_sep>+++
author = "wheatdog"
comments = true
date = "2015-12-08T20:36:05+08:00"
draft = false
image = ""
menu = ""
share = true
slug = "devember-6"
tags = ["devember", "wastale"]
title = "Devember Day 6 - Input Playback"
+++
Today's code is on
[this tag](https://github.com/wheatdog/wastale/tree/devember_6). The complete
code can be found in [here](https://github.com/wheatdog/wastale).
I think it is about time to implement "Input Playback" feature. For now, the
maximum minutes of input I can record is about one hour, which I think is fine
now.
I figure out the stuck problem. If you are interested, you can check out
[this commit](https://github.com/wheatdog/wastale/commit/f3411f9170ce00ec7d8393b53594d0f10dedc4d1).
There is another problem I think is about floating point precision.
When one player is on top of the other and the player beneath presses the up button
and releases it, it will sink into the ground.

I sort of know there is something wrong in `ClipDimToValide`, especially the
value of `Theta`, but I am not sure how I want to handle it. Maybe I should
watch Week 16 of Handmade Hero, or I could leave it until I wrote the engine
version of collision detection. Let me think about it...
<file_sep>+++
author = "wheatdog"
comments = true
date = "2015-12-01T23:17:19+08:00"
draft = false
image = ""
menu = ""
share = true
slug = "devember-day-1"
tags = ["devember", "wastale"]
title = "Devember Day 1 - Basic Win32 Layer"
+++
Last week, I spent some time reviewing the construction of Win32 prototyping
layer mentioned in Handmade Hero weeks 1 through 5. In those weeks, Casey talks
about some basic stuff that a game need - Graphic, Sound, and Input. If you are
interested in detail, I would recommend you to watch
[Day 1 to 25 of Handmade Hero](https://hero.handmadedev.org/jace/guide/).
Most of the code in `win32_wastale.*` is the same as the code in `win32_handmade.*`.
The most different part is the sound. I will talk about in the rest of the
article. By the way, the source code of wastale is on
[Github](https://github.com/wheatdog/wastale) if you want to take a look.
Today's code is on
[this tag](https://github.com/wheatdog/wastale/tree/devember_1).
# XAudio2
Instead of using DirectSound, I choose XAudio2. There is only one ring buffer in
DirectSound. When we have reached the end of the buffer, we start from the
beginning. However, in XAudio2, we can submit several `XAUDIO2_BUFFER` through
`IXAudio2SourceVoice::SubmitSourceBuffer` method. Basically, it is like a queue.
Each time we reach the end of a buffer, it will start playing the next buffer.
We can reduce the audio latency by keeping the same number of samples in the
queue. For example, Red line represent the frame boundary. Let's say that we run
in 30 frames per second and have 48000 samples per second. Therefore, there are
1600 samples per frame. Every time we write, we will make sure that the number
of samples in the audio queue are 2400, that is the number of samples in one and
a half frames.
[](https://flic.kr/p/BgS6Lj)
Brown arrow represent the time that we first write to the sound buffer, and the
brown rounded rectangle near the black line represent the samples we write. Next
time we update samples at the time green arrow point to. To keep the number of
samples in the queue, we will only need to write the number of samples that
green rounded rectangle stand for. If we can update samples in the audio buffer
just in the middle of the frame like arrows in the image above. We can generate
enough samples for next frame and those samples will be played right on the
frame boundary.
Unfortunately, first we cannot guarantee we always update samples at the same
time. Therefore, the sound boundary may not line up with the frame boundary, which I
think is fine. Second, `IXAudio2SourceVoice::GetState` cannot return the exact number of
samples we just played without using a callback. Its granularity is 480 samples on
my machine. That means that we might fill fewer samples than what we really need
to. The worst case is that we will have the gap of 480 samples.
The easy way to solve this problem is to make the number we want to keep in the
queue larger. Another solution will be using callback which I am really not
familiar with. I think the latency right now is fine for a prototyping
layer. Therefore, I will move on to deal with other problem.
# Tomorrow
I will try to implement "Live Code Editing" feature Casey talked about in
Handmade Hero Week 5 tomorrow. Then, maybe start game engine code exploration
Thursday. Until then!
<file_sep>+++
author = "wheatdog"
comments = true
date = "2015-12-04T22:57:35+08:00"
draft = false
image = ""
menu = ""
share = true
slug = "devember-day-4"
tags = ["devember", "wastale"]
title = "Devember Day 4 - GJK Collision Detection"
+++
Today's code is on
[this tag](https://github.com/wheatdog/wastale/tree/devember_4). The complete
code can be found in [here](https://github.com/wheatdog/wastale).
Yesterday, I used `player_info` to deal with the multiplayer problem. Today, I
spent a while thinking harder. I realized that it may be a good time to
bring the concept of `entity` into the code.
~~~
struct entity
{
b32 Exist;
entity_type Type;
v2 ddP;
v2 dP;
v2 P;
v2 WidthHeight;
b32 Collide;
};
~~~
Following is about collision detection. I use the well-known GJK algorithm. You can
learn how it works via [Casey's video](http://mollyrocket.com/849) and
[William Bittle's post](http://www.dyn4j.org/2010/04/gjk-gilbert-johnson-keerthi/).
I might write a separate post to explain GJK in Chinese.
Anyway, here is what it looks like.

Really cool, right?
Tomorrow, I will make entities truly collide with each others. Until then.
<file_sep>+++
author = "wheatdog"
comments = true
date = "2015-12-02T22:47:18+08:00"
draft = false
image = ""
menu = ""
share = true
slug = "devember-day-2"
tags = ["devember", "wastale"]
title = "Devember Day 2 - Live Code Editing"
+++
Today's code is on
[this tag](https://github.com/wheatdog/wastale/tree/devember_2). The complete
code can be found in [here](https://github.com/wheatdog/wastale).
The coolest thing I did today is "Live Code Editing" that Casey mentioned
in Handmade Hero week 5. The basic idea is to make platform independent code be
another translation unit, and compile it to a DLL. Then, we can dynamically load
it in the Win32 platform code. By doing so, we can change our game code without
reopening the executable file at some point.

I might implement the "Loop editing" feature after I got some entities on the
screen. Tomorrow, I will start doing some game code. Until then.
<file_sep>hugo -t casper server -w
| 6340bd9fb09e21aaae0878e8f046295ca6c5ff21 | [
"Markdown",
"TOML",
"Shell"
] | 11 | Markdown | wheatdog/stringbulbs | ac58237d11ecef743d879a21e833e162b5d5b724 | 1790f88a3f3226763cef36df55973eed9aad86bd |
refs/heads/master | <repo_name>atomindustries/torify<file_sep>/torify.go
package tor
import (
"context"
"fmt"
"io/ioutil"
"net/http"
"os"
"os/exec"
"runtime"
"strings"
"golang.org/x/net/proxy"
"github.com/cretz/bine/process"
"github.com/cretz/bine/process/embedded"
)
// Tor bundles the configuration and runtime state needed to run an
// embedded Tor process fronting a local hidden service.
type Tor struct {
	link  string          // public .onion hostname, read from the hidden-service dir
	host  string          // SOCKS proxy host (e.g. 127.0.0.1)
	port  int             // SOCKS proxy port
	hport int             // local port the hidden service forwards to
	torrc string          // path to the torrc file written by configureTorrc
	path  string          // working dir ("<cwd>/config") holding hidden-service data and tortor.js
	os    string          // runtime.GOOS, filled in by Run
	node  process.Creator // factory for the embedded tor process
}
// NewTor builds a Tor value pointed at the given SOCKS host/port, with the
// hidden service forwarding to hport. The working path defaults to
// "<cwd>/config"; the remaining fields are filled in later by Run.
func NewTor(host string, port int, hport int) *Tor {
	cwd, _ := os.Getwd()
	t := &Tor{
		host:  host,
		port:  port,
		hport: hport,
		node:  embedded.NewCreator(),
	}
	t.path = fmt.Sprintf("%s/config", cwd)
	return t
}
// PrepareEnv makes sure Homebrew, Node.js and the bundled npm modules are
// available, installing whichever is missing. On an installation failure
// it prints a message and exits with a distinct status code
// (1 = Homebrew, 2 = Node.js, 3 = npm modules).
func (t *Tor) PrepareEnv() {
	if t.isHomeBrewInstalled() {
		fmt.Printf("Home Brew was pre-installed...\n")
	} else if err := t.installHomeBrew(); err != nil {
		fmt.Printf("Can't install Home Brew\n")
		os.Exit(1)
	} else {
		fmt.Printf("Home Brew installed...\n")
	}
	if t.isNodejsInstalled() {
		fmt.Printf("Node.js was pre-installed...\n")
	} else if err := t.installNodejs(); err != nil {
		fmt.Printf("Can't install Node.js\n")
		os.Exit(2)
	} else {
		fmt.Printf("Node.js installed...\n")
	}
	// npm install always runs; it is idempotent over the config directory.
	if err := t.installModules(); err != nil {
		fmt.Printf("Can't install npm modules\n")
		os.Exit(3)
	}
	fmt.Printf("npm modules installed...\n")
}
// RunHiddenService launches the bundled Node.js server (tortor.js) that
// backs the hidden service, passing the forwarded port through the PORT
// environment variable. The server keeps running in the background and
// this call returns immediately.
//
// Fix: the original wrapped cmd.Start() in a goroutine and deferred
// cmd.Wait(). exec.Cmd.Start is already non-blocking, and the deferred
// Wait raced with that goroutine (Wait could run before Start had been
// called). Start is now invoked directly and the child is reaped by a
// background Wait so it does not linger as a zombie.
func (t *Tor) RunHiddenService() {
	port := fmt.Sprintf("PORT=%d", t.hport)
	path := fmt.Sprintf("%s/tortor.js", t.path)
	cmd := exec.Command("node", path)
	// Inherit the current environment and append the PORT override.
	cmd.Env = append(os.Environ(), port)
	// Start is non-blocking; the node server runs in the background.
	if err := cmd.Start(); err != nil {
		fmt.Printf("Can't start hidden service: %v\n", err)
		return
	}
	// Reap the child when it eventually exits.
	go cmd.Wait()
}
// Run writes the torrc configuration, starts the embedded tor process,
// and records the service's public .onion hostname on t.link. It exits
// the whole program with status 4 if the torrc cannot be configured.
func (t *Tor) Run(torrc string) {
	t.torrc = torrc
	t.os = runtime.GOOS
	// Write the hidden-service configuration and fix file permissions.
	if err := t.configureTorrc(); err != nil {
		os.Exit(4)
	}
	// Create the embedded tor instance driven by the torrc file.
	// NOTE(review): the error from New is discarded; p may be unusable here.
	p, _ := t.node.New(context.Background(), "-f", torrc)
	// Launch tor in the background.
	p.Start()
	// Read the generated onion hostname for this hidden service.
	// NOTE(review): tor writes the hostname file asynchronously during
	// bootstrap — reading it immediately after Start may race; confirm.
	link, _ := ioutil.ReadFile(fmt.Sprintf("%s/hostname", t.path))
	t.link = strings.TrimSuffix(string(link), "\n")
	// NOTE(review): this deferred Wait blocks Run until the tor process
	// exits, so Run does not return while tor is healthy — presumably
	// intentional (keeps the program alive); confirm with callers.
	defer p.Wait()
}
// Request fetches http://<dest> through the local SOCKS5 proxy at
// t.host:t.port and returns the raw response body.
func (t *Tor) Request(dest string) ([]byte, error) {
	// Build a SOCKS5 dialer pointed at the local tor proxy.
	dialer, err := proxy.SOCKS5("tcp", fmt.Sprintf("%s:%d", t.host, t.port), nil, proxy.Direct)
	if err != nil {
		return nil, err
	}
	// Route all client traffic through the proxy dialer.
	transport := &http.Transport{}
	transport.Dial = dialer.Dial
	client := &http.Client{Transport: transport}
	req, err := http.NewRequest("GET", fmt.Sprintf("http://%s", dest), nil)
	if err != nil {
		return nil, err
	}
	resp, err := client.Do(req)
	if err != nil {
		return nil, err
	}
	// Close the body once the response has been consumed.
	defer resp.Body.Close()
	return ioutil.ReadAll(resp.Body)
}
<file_sep>/getter.go
package tor
import (
"fmt"
"io/ioutil"
"strings"
)
// GetHost returns the SOCKS proxy host.
func (t *Tor) GetHost() string {
	return t.host
}

// GetLink re-reads the hidden service's hostname file, caches the trimmed
// .onion address on t.link, and returns it.
// NOTE(review): the ReadFile error is discarded — if the file is missing
// this silently returns "".
func (t *Tor) GetLink() string {
	link, _ := ioutil.ReadFile(fmt.Sprintf("%s/hostname", t.path))
	t.link = strings.TrimSuffix(string(link), "\n")
	return t.link
}

// GetPath returns the working directory ("<cwd>/config") used for torrc
// and hidden-service data.
func (t *Tor) GetPath() string {
	return t.path
}

// GetPort returns the SOCKS proxy port formatted as a decimal string.
func (t *Tor) GetPort() string {
	return fmt.Sprintf("%d", t.port)
}
<file_sep>/configure.go
package tor
import(
"fmt"
"io/ioutil"
"os/exec"
)
// configureTorrc writes the hidden-service torrc (darwin only) and fixes
// permissions on the working directory and the bundled node server.
//
// Fix: the chmod calls previously went through `sh -c` with t.path
// interpolated into the command string, which breaks on paths containing
// spaces or shell metacharacters (and is an injection hazard). They now
// exec chmod directly with argument vectors.
func (t *Tor) configureTorrc() error {
	if t.os == "darwin" {
		// Point tor at our hidden-service directory and forward virtual
		// port 80 to the local node server.
		settings := fmt.Sprintf("HiddenServiceDir %s", t.path)
		settings = fmt.Sprintf("%s\nHiddenServicePort 80 %s:%d", settings, t.host, t.hport)
		// Create the torrc file (or overwrite an existing one).
		if err := ioutil.WriteFile(t.torrc, []byte(settings), 0700); err != nil {
			return err
		}
	}
	// Tor requires the hidden-service directory to be private (0700).
	if _, err := exec.Command("chmod", "700", t.path).Output(); err != nil {
		return err
	}
	// Make the bundled node server executable.
	if _, err := exec.Command("chmod", "+x", fmt.Sprintf("%s/tortor.js", t.path)).Output(); err != nil {
		return err
	}
	return nil
}
<file_sep>/install.go
package tor
import (
"fmt"
"os/exec"
)
// installHomeBrew runs the official Homebrew install script. It is a
// no-op on anything other than macOS.
func (t *Tor) installHomeBrew() error {
	if t.os != "darwin" {
		return nil
	}
	// The install script must run through a shell so the embedded
	// $(curl ...) substitution is evaluated.
	script := "/usr/bin/ruby -e \"$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)\""
	_, err := exec.Command("sh", "-c", script).Output()
	return err
}
// installNodejs installs Node.js via the platform package manager.
// Currently only macOS (Homebrew) is implemented; other platforms are a
// no-op that reports success, matching the previous observable behavior.
//
// Fix: on linux/windows the original executed `sh -c ""` (an empty
// command), which exited 0 and so silently "succeeded"; unsupported
// platforms now return nil explicitly without spawning a shell. The brew
// invocation also execs directly instead of going through `sh -c`, and a
// pointless fmt.Sprintf of a constant string is gone.
func (t *Tor) installNodejs() error {
	if t.os != "darwin" {
		// No installer wired up for this platform yet.
		return nil
	}
	_, err := exec.Command("brew", "install", "node").Output()
	return err
}
// installModules installs the npm dependencies of the bundled node server
// into the working directory.
//
// Fix: the command previously went through `sh -c` with t.path
// interpolated into the string, which breaks on paths with spaces and is
// an injection hazard; npm is now exec'd directly with an argument vector.
func (t *Tor) installModules() error {
	_, err := exec.Command("npm", "--prefix", t.path, "--silent", "install").Output()
	return err
}
<file_sep>/checker.go
package tor
import "os/exec"
// isHomeBrewInstalled reports whether the `brew` command is runnable.
func (t *Tor) isHomeBrewInstalled() bool {
	_, err := exec.Command("brew", "-v").Output()
	return err == nil
}

// isNodejsInstalled reports whether npm is runnable.
// NOTE(review): this probes `npm -v`, not `node` itself — presumably npm
// implies node on the target systems; confirm.
func (t *Tor) isNodejsInstalled() bool {
	_, err := exec.Command("npm", "-v").Output()
	return err == nil
}
| 2162bc9b01bf669d830956ace75598f85961f742 | [
"Go"
] | 5 | Go | atomindustries/torify | 8f9ef3836a34dbdaa55005e8fe100baef7b20a52 | d02d801cf73cf2f26cfdd8c64e38e050a095ee74 |
refs/heads/master | <file_sep>const router = require('express').Router();
const { Campus, Student } = require('../db/index');
const Sequelize = require('sequelize');
const Op = Sequelize.Op;
// GET /api/campuses — all campuses, each with its (possibly empty) list
// of enrolled students.
router.get('/', async (req, res, next) => {
  const query = {
    include: [{ model: Student, required: false }],
  };
  try {
    res.json(await Campus.findAll(query));
  } catch (err) {
    next(err);
  }
});
// GET /api/campuses/:campusId — a single campus (as a one-element array,
// which is what the frontend expects) together with its students.
router.get('/:campusId', async (req, res, next) => {
  const { campusId } = req.params;
  try {
    const result = await Campus.findAll({
      where: { id: { [Op.eq]: campusId } },
      include: [
        {
          model: Student,
          where: { campusId: { [Op.eq]: campusId } },
          required: false,
        },
      ],
    });
    res.json(result);
  } catch (err) {
    next(err);
  }
});
// POST /api/campuses — create a campus from the request body and return it.
router.post('/', async (req, res, next) => {
  try {
    res.json(await Campus.create(req.body));
  } catch (err) {
    next(err);
  }
});
// PUT /api/campuses/:campusId — update a campus and return the new row.
//
// Fix: the original only responded when exactly one row was updated, so a
// PUT against a nonexistent id left the request hanging forever. Unknown
// ids now get a 404.
router.put('/:campusId', async (req, res, next) => {
  try {
    const [numUpdatedRows, updatedCampusRows] = await Campus.update(req.body, {
      where: { id: { [Op.eq]: req.params.campusId } },
      returning: true, // Postgres-only: hand back the updated rows
    });
    if (numUpdatedRows === 1) {
      res.json(updatedCampusRows[0]);
    } else {
      res.sendStatus(404);
    }
  } catch (error) {
    next(error);
  }
});
// DELETE /api/campuses/:campusId — remove a campus; responds 204 whether
// or not a row matched.
router.delete('/:campusId', async (req, res, next) => {
  const where = { id: { [Op.eq]: req.params.campusId } };
  try {
    await Campus.destroy({ where });
    res.sendStatus(204);
  } catch (err) {
    next(err);
  }
});

module.exports = router;
<file_sep>import React from 'react';
import { connect } from 'react-redux';
import { Link } from 'react-router-dom';
import {
thunkToGetACampusCreator,
thunkToAddStudentToCampusCreator,
thunkToRemoveStudentFromCampusCreator,
} from '../reducers/campusesReducer';
import { thunkToGetStudentsCreator } from '../reducers/studentsReducer';
import { List } from './utils';
import ConnectedUpdateCampus from './UpdateCampus';
// Expose the currently selected campus and the full student list.
const mapStateToProps = ({ campusesReducer, studentsReducer }) => ({
  campus: campusesReducer.campus,
  students: studentsReducer.students,
});

// Bind the campus/student thunks this component dispatches.
const mapDispatchToProps = dispatch => ({
  thunkToGetACampusCreator(campusId) {
    return dispatch(thunkToGetACampusCreator(campusId));
  },
  thunkToAddStudentToCampusCreator(studentToAddToCampus) {
    return dispatch(thunkToAddStudentToCampusCreator(studentToAddToCampus));
  },
  thunkToRemoveStudentFromCampusCreator(studentToRemoveFromCampus, campusId) {
    return dispatch(
      thunkToRemoveStudentFromCampusCreator(studentToRemoveFromCampus, campusId)
    );
  },
  thunkToGetStudentsCreator() {
    return dispatch(thunkToGetStudentsCreator());
  },
});
// SingleCampus: detail view for one campus. Shows the campus info, the
// students enrolled there (each with a remove button), a selector to
// enroll any not-yet-enrolled student, and an inline update form toggled
// by the "Update This Campus" button.
class SingleCampus extends React.Component {
  constructor() {
    super();
    // isClicked: whether the inline update form is shown.
    // studentToAddId: id currently selected in the enroll dropdown.
    // remainingStudents: students NOT enrolled at this campus.
    this.state = {
      isClicked: false,
      studentToAddId: 1,
      remainingStudents: [],
    };
    this.handleClick = this.handleClick.bind(this);
    this.handleChange = this.handleChange.bind(this);
    this.handleAddStudentToCampus = this.handleAddStudentToCampus.bind(this);
    this.handleRemoveStudentFromCampus = this.handleRemoveStudentFromCampus.bind(
      this
    );
    this.updateRemainingStudents = this.updateRemainingStudents.bind(this);
  }
  // Recompute which students are not yet enrolled at this campus and
  // preselect the first of them in the dropdown.
  // NOTE(review): the filter callback returns the student object (truthy)
  // or undefined rather than a boolean — works, but relies on truthiness.
  updateRemainingStudents() {
    if (this.props.students.length > 0) {
      const remainingStudents = this.props.students.filter(student => {
        if (this.props.campus.students.length === 0) {
          return student;
        } else {
          if (
            this.props.campus.students.every(elem => elem.id !== student.id) ===
            true
          ) {
            return student;
          }
        }
      });
      // Fall back to id 1 when every student is already enrolled.
      let studentToAddId = 1;
      if (remainingStudents.length > 0) {
        studentToAddId = remainingStudents[0].id;
      }
      this.setState({
        remainingStudents,
        studentToAddId,
      });
    }
  }
  // Load the campus (from the URL param) and the full student list, then
  // derive the enrollable set.
  async componentDidMount() {
    await this.props.thunkToGetACampusCreator(this.props.match.params.campusId);
    await this.props.thunkToGetStudentsCreator();
    if (this.props.campus !== undefined) {
      this.updateRemainingStudents();
    }
  }
  // Reveal the inline update form.
  handleClick(event) {
    event.preventDefault();
    this.setState({ isClicked: true });
  }
  // Generic controlled-input handler keyed by the input's name attribute.
  handleChange(event) {
    this.setState({
      [event.target.name]: event.target.value,
    });
  }
  // Enroll the selected student: copy its scalar fields (dropping nested
  // associations), point campusId at this campus, dispatch the update.
  async handleAddStudentToCampus() {
    const aStudentToAdd = this.props.students.filter(
      student => student.id === Number(this.state.studentToAddId)
    );
    const studentToAddToCampus = Object.fromEntries(
      Object.entries(aStudentToAdd[0]).filter(
        ([key, value]) => typeof value !== 'object'
      )
    );
    studentToAddToCampus.campusId = this.props.campus.id;
    await this.props.thunkToAddStudentToCampusCreator(studentToAddToCampus);
    this.updateRemainingStudents();
  }
  // Unenroll a student: same scalar-field copy, but campusId is nulled.
  async handleRemoveStudentFromCampus(studentToRemoveId) {
    const aStudentToRemove = this.props.students.filter(
      student => student.id === Number(studentToRemoveId)
    );
    const studentToRemoveFromCampus = Object.fromEntries(
      Object.entries(aStudentToRemove[0]).filter(
        ([key, value]) => typeof value !== 'object'
      )
    );
    studentToRemoveFromCampus.campusId = null;
    await this.props.thunkToRemoveStudentFromCampusCreator(
      studentToRemoveFromCampus,
      this.props.campus.id
    );
    this.updateRemainingStudents();
  }
  render() {
    // Falls through to a "no campus" message when the id did not resolve.
    const { campus } = this.props;
    return campus !== undefined ? (
      <div id="aCampus">
        <div className="aCampusInfo">
          <img className="imageLarge" src={campus.imageUrl} />
          <h2>{campus.name}</h2>
          <span>{campus.address}</span>
          <hr />
          <p>{campus.description}</p>
        </div>
        <div className="aCampusInfoExtra">
          <div id="aCampusInfoStudents">
            <div id="aCampusInfoStudentsList">
              <h4>
                {campus.students === undefined
                  ? 'No'
                  : `${campus.students.length}`}
                {' Students enrolled at this campus'}
              </h4>
              {campus.students !== undefined ? (
                <List
                  forEachOfThese={campus.students}
                  doThis={student => {
                    return (
                      <div
                        key={student.id}
                        className="aCampusInfoStudentsList-item"
                      >
                        <Link to={`/students/${student.id}`}>
                          <img className="imageTiny" src={student.imageUrl} />
                          <span className="textImageTiny">
                            {student.fullName}
                          </span>
                        </Link>
                        <button
                          id="removeFrom"
                          type="submit"
                          name="removeStudentFromCampus"
                          onClick={() =>
                            this.handleRemoveStudentFromCampus(student.id)
                          }
                        >
                          Remove From Campus
                        </button>
                      </div>
                    );
                  }}
                  unlessEmpty={() => (
                    <div>
                      <p>
                        Currently, there are no students registered at this
                        campus.
                      </p>
                    </div>
                  )}
                />
              ) : null}
            </div>
            <div id="student-options">
              <div id="Select-student-option">
                <label htmlFor="Student-select">Select student:</label>
                <select
                  id="Student-select"
                  name="studentToAddId"
                  value={this.state.studentToAddId}
                  onChange={this.handleChange}
                >
                  <List
                    forEachOfThese={this.state.remainingStudents}
                    doThis={student => {
                      return (
                        <option key={student.id} value={student.id}>
                          {student.fullName}
                        </option>
                      );
                    }}
                    unlessEmpty={() => <option>No students to add.</option>}
                  />
                </select>
                <button
                  id="addTo"
                  type="submit"
                  name="addStudentToCampus"
                  onClick={this.handleAddStudentToCampus}
                  disabled={this.state.remainingStudents.length === 0}
                >
                  Add To Campus
                </button>
              </div>
            </div>
            {this.state.isClicked === false ? (
              <div className="aCampusInfoButtons">
                <button
                  id="update"
                  type="submit"
                  name="updateCampus"
                  onClick={this.handleClick}
                >
                  Update This Campus
                </button>
              </div>
            ) : (
              <ConnectedUpdateCampus campus={campus} />
            )}
          </div>
        </div>
      </div>
    ) : (
      <div>
        <p>No campus with that Id exists !</p>
        <p>
          Please check the All Campuses List by clicking "Campuses" link in the
          navigation bar.
        </p>
      </div>
    );
  }
}
// Redux-connected wrapper; this is the component the router renders.
const ConnectedSingleCampus = connect(
  mapStateToProps,
  mapDispatchToProps
)(SingleCampus);
export default ConnectedSingleCampus;
<file_sep>import React from 'react';
const Home = () => {
return (
<div>
<p>
Browse our Campuses List by clicking "Campuses" link in the navigation
bar !
</p>
<p>
Browse our Students List by clicking "Students" link in the navigation
bar !
</p>
</div>
);
};
export default Home;
<file_sep>import React from 'react';
const CampusForm = props => {
return (
<form id="form" onSubmit={props.handleSubmit}>
<p>
<label htmlFor="name">
Name :{' '}
<span className="warning" hidden={props.name}>
*
</span>
</label>
<input
name="name"
type="text"
value={props.name}
onChange={props.handleChange}
/>
</p>
<p>
<label htmlFor="address">
Address :{' '}
<span className="warning" hidden={props.address}>
*
</span>
</label>
<input
name="address"
type="text"
value={props.address}
onChange={props.handleChange}
/>
</p>
<p>
<label htmlFor="imageUrl">Image URL : </label>
<input
name="imageUrl"
type="text"
value={props.imageUrl}
onChange={props.handleChange}
/>
</p>
<p>
<label htmlFor="description">Description : </label>
<textarea
name="description"
rows="4"
cols="60"
value={props.description}
onChange={props.handleChange}
/>
</p>
<p>
<span className="warning" hidden={props.name && props.address}>
* Field is required !
</span>
<span className="warning">{props.errorMessage}</span>
</p>
<p>
<button
id="submit"
type="submit"
name="addCampus"
onClick={props.handleSubmit}
disabled={!props.name || !props.address}
>
{props.buttonName}
</button>
</p>
</form>
);
};
export default CampusForm;
<file_sep>import React from 'react';
import { connect } from 'react-redux';
import { thunkToAddAStudentCreator } from '../reducers/studentsReducer';
import StudentForm from './StudentForm';
// Surface any API error message recorded by the error reducer.
const mapStateToProps = ({ errorsReducer }) => ({
  errorMessage: errorsReducer.errorMessage,
});

// Bind the "create student" thunk.
const mapDispatchToProps = dispatch => ({
  thunkToAddAStudentCreator(newStudent) {
    return dispatch(thunkToAddAStudentCreator(newStudent));
  },
});
// Blank form state; also used to reset the form after a successful add.
const defaultState = {
  firstName: '',
  lastName: '',
  email: '',
  imageUrl: '',
  gpa: '0.0',
  errorMsg: '',
};
class NewStudent extends React.Component {
constructor(props) {
super(props);
this.state = defaultState;
this.handleSubmit = this.handleSubmit.bind(this);
this.handleChange = this.handleChange.bind(this);
this.checkForDuplicateName = this.checkForDuplicateName.bind(this);
this.checkForDuplicateEmail = this.checkForDuplicateEmail.bind(this);
}
handleChange(event) {
this.setState({
[event.target.name]: event.target.value,
});
}
checkForDuplicateName(newStudent) {
const newFullName = newStudent.firstName + ' ' + newStudent.lastName;
const duplicate = this.props.students.filter(
student => student.fullName === newFullName
);
if (duplicate.length > 0) return true;
else return false;
}
checkForDuplicateEmail(newStudent) {
const newEmail = newStudent.email;
const duplicate = this.props.students.filter(
student => student.email === newEmail
);
if (duplicate.length > 0) return true;
else return false;
}
async handleSubmit(event) {
event.preventDefault();
const newStudent = {
firstName: this.state.firstName,
lastName: this.state.lastName,
email: this.state.email,
imageUrl: this.state.imageUrl,
gpa: this.state.gpa,
};
if (this.checkForDuplicateName(newStudent) === true) {
this.setState({ errorMsg: 'Student with this name already exists.' });
} else if (this.checkForDuplicateEmail(newStudent) === true) {
this.setState({ errorMsg: 'This email address is already used.' });
} else {
await this.props.thunkToAddAStudentCreator(newStudent);
if (this.props.errorMessage === '') {
this.setState(defaultState);
} else {
this.setState({ errorMsg: this.props.errorMessage });
}
}
}
render() {
return (
<StudentForm
{...this.state}
errorMessage={this.state.errorMsg}
handleChange={this.handleChange}
handleSubmit={this.handleSubmit}
buttonName="Add This Student"
/>
);
}
}
// Redux-connected wrapper; rendered by AllStudents with a `students` prop.
const ConnectedNewStudent = connect(
  mapStateToProps,
  mapDispatchToProps
)(NewStudent);
export default ConnectedNewStudent;
<file_sep>import axios from 'axios';
import { initialState } from './index';
import { gotError, resetError } from '../reducers/errorsReducer';
// Action type constants (students slice).
const GOT_ALL_STUDENTS = 'GOT_ALL_STUDENTS_SUCCESSFULLY';
const GOT_A_STUDENT = 'GOT_A_STUDENT_SUCCESSFULLY';
const ADDED_A_STUDENT = 'ADDED_A_STUDENT_SUCCESSFULLY';
const DELETED_A_STUDENT = 'DELETED_A_STUDENT_SUCCESSFULLY';
const UPDATED_A_STUDENT = 'UPDATED_A_STUDENT_SUCCESSFULLY';

// Plain action creators. Each wraps its payload under the field name the
// reducer expects.
const gotAllStudents = students => {
  return { type: GOT_ALL_STUDENTS, students };
};
const gotAStudent = student => {
  return { type: GOT_A_STUDENT, student };
};
const addedAStudent = student => {
  return { type: ADDED_A_STUDENT, student };
};
const deletedAStudent = studentId => {
  return { type: DELETED_A_STUDENT, studentId };
};
// Exported so other slices can react to cross-slice student updates.
export const updatedAStudent = student => {
  return { type: UPDATED_A_STUDENT, student };
};
// Thunk Creators — async actions talking to /api/students.
//
// Fix: every catch previously read error.response.data unconditionally.
// Axios errors that never got a response (network failure, timeout) have
// no `response`, so the handler itself threw a TypeError and no error
// action was dispatched. Each handler now falls back to error.message.
const apiErrorPayload = error =>
  error.response ? error.response.data : error.message;

// Fetch all students (with campuses) into the store.
export const thunkToGetStudentsCreator = function() {
  return async function(dispatch) {
    try {
      const { data } = await axios.get('/api/students');
      dispatch(gotAllStudents(data));
      dispatch(resetError());
    } catch (error) {
      dispatch(gotError(apiErrorPayload(error)));
    }
  };
};

// Fetch a single student by id (the API returns a one-element array).
export const thunkToGetAStudentCreator = function(studentId) {
  return async function(dispatch) {
    try {
      const { data } = await axios.get(`/api/students/${studentId}`);
      dispatch(gotAStudent(data[0]));
      dispatch(resetError());
    } catch (error) {
      dispatch(gotError(apiErrorPayload(error)));
    }
  };
};

// Create a student and append it to the list.
export const thunkToAddAStudentCreator = function(newStudent) {
  return async function(dispatch) {
    try {
      const { data } = await axios.post('/api/students', newStudent);
      dispatch(addedAStudent(data));
      dispatch(resetError());
    } catch (error) {
      dispatch(gotError(apiErrorPayload(error)));
    }
  };
};

// Delete a student by id and drop it from the list.
export const thunkToDeleteAStudentCreator = function(studentToDeleteId) {
  return async function(dispatch) {
    try {
      await axios.delete(`/api/students/${studentToDeleteId}`);
      dispatch(deletedAStudent(studentToDeleteId));
      dispatch(resetError());
    } catch (error) {
      dispatch(gotError(apiErrorPayload(error)));
    }
  };
};

// Update a student and replace it in the list.
export const thunkToUpdateAStudentCreator = function(studentToUpdate) {
  return async function(dispatch) {
    try {
      const { data } = await axios.put(
        `/api/students/${studentToUpdate.id}`,
        studentToUpdate
      );
      dispatch(updatedAStudent(data));
      dispatch(resetError());
    } catch (error) {
      dispatch(gotError(apiErrorPayload(error)));
    }
  };
};
// Reducer for the students slice. Handles list/detail fetches plus
// add/delete/update, always returning a fresh state object.
function studentsReducer(state = initialState, action) {
  const { type } = action;
  if (type === GOT_ALL_STUDENTS) {
    return { ...state, students: action.students };
  }
  if (type === GOT_A_STUDENT) {
    return { ...state, student: action.student };
  }
  if (type === ADDED_A_STUDENT) {
    return { ...state, students: [...state.students, action.student] };
  }
  if (type === DELETED_A_STUDENT) {
    const students = state.students.filter(
      student => student.id !== action.studentId
    );
    return { ...state, students };
  }
  if (type === UPDATED_A_STUDENT) {
    const students = state.students.map(student =>
      student.id === action.student.id ? action.student : student
    );
    return { ...state, students };
  }
  return state;
}
export default studentsReducer;
<file_sep>const router = require('express').Router();
const { Student, Campus } = require('../db/index');
const Sequelize = require('sequelize');
const Op = Sequelize.Op;
// GET /api/students — all students, each with its campus (if any).
router.get('/', async (req, res, next) => {
  const query = {
    include: [{ model: Campus, required: false }],
  };
  try {
    res.json(await Student.findAll(query));
  } catch (err) {
    next(err);
  }
});
// GET /api/students/:studentId — one student (as a one-element array,
// which is what the frontend expects) together with its campus.
router.get('/:studentId', async (req, res, next) => {
  try {
    const result = await Student.findAll({
      where: { id: { [Op.eq]: req.params.studentId } },
      include: [{ model: Campus, required: false }],
    });
    res.json(result);
  } catch (err) {
    next(err);
  }
});
// POST /api/students — create a student from the request body and return it.
router.post('/', async (req, res, next) => {
  try {
    res.json(await Student.create(req.body));
  } catch (err) {
    next(err);
  }
});
// PUT /api/students/:studentId — update a student and return the new row.
//
// Fix: the original only responded when exactly one row was updated, so a
// PUT against a nonexistent id left the request hanging forever. Unknown
// ids now get a 404.
router.put('/:studentId', async (req, res, next) => {
  try {
    const [numUpdatedRows, updatedStudentRows] = await Student.update(
      req.body,
      {
        where: { id: { [Op.eq]: req.params.studentId } },
        returning: true, // Postgres-only: hand back the updated rows
      }
    );
    if (numUpdatedRows === 1) {
      res.json(updatedStudentRows[0]);
    } else {
      res.sendStatus(404);
    }
  } catch (error) {
    next(error);
  }
});
// DELETE /api/students/:studentId — remove a student; responds 204
// whether or not a row matched.
router.delete('/:studentId', async (req, res, next) => {
  const where = { id: { [Op.eq]: req.params.studentId } };
  try {
    await Student.destroy({ where });
    res.sendStatus(204);
  } catch (err) {
    next(err);
  }
});

module.exports = router;
<file_sep>import React from 'react';
import { connect } from 'react-redux';
import { Link } from 'react-router-dom';
import {
thunkToGetCampusesCreator,
thunkToDeleteACampusCreator,
} from '../reducers/campusesReducer';
import { List } from './utils';
import ConnectedNewCampus from './NewCampus';
// Expose the campus list from the campuses slice.
const mapStateToProps = ({ campusesReducer }) => ({
  campuses: campusesReducer.campuses,
});

// Bind the fetch-all and delete thunks.
const mapDispatchToProps = dispatch => ({
  thunkToGetCampusesCreator() {
    return dispatch(thunkToGetCampusesCreator());
  },
  thunkToDeleteACampusCreator(campusToDeleteId) {
    return dispatch(thunkToDeleteACampusCreator(campusToDeleteId));
  },
});
class AllCampuses extends React.Component {
constructor() {
super();
this.handleDelete = this.handleDelete.bind(this);
}
async componentDidMount() {
await this.props.thunkToGetCampusesCreator();
}
async handleDelete(campusId) {
await this.props.thunkToDeleteACampusCreator(campusId);
await this.props.thunkToGetCampusesCreator();
this.setState();
}
render() {
const { campuses } = this.props;
return (
<div id="allListing">
<h2>Our Campuses</h2>
<div id="list">
<List
forEachOfThese={campuses}
doThis={campus => {
return (
<div id="list-item-campus" key={campus.id}>
<div className="list-item-campus-image">
<Link to={`/campuses/${campus.id}`}>
<img className="imageBig" src={campus.imageUrl} />
</Link>
</div>
<div className="list-item-campus-info">
<Link to={`/campuses/${campus.id}`}>{campus.name}</Link>
{campus.students !== undefined ? (
<p>
{campus.students.length === 0
? 'No'
: campus.students.length}{' '}
{campus.students.length === 1 ? 'student' : 'students'}
</p>
) : (
<p>No students</p>
)}
<button
id="delete"
type="button"
name="deleteCampus"
onClick={() => this.handleDelete(campus.id)}
>
Delete
</button>
</div>
</div>
);
}}
unlessEmpty={() => (
<div>
<p>
Currently, there are no campuses registered in the database.
</p>
</div>
)}
/>
</div>
<hr />
<div id="add-new-campus">
<ConnectedNewCampus campuses={campuses} />
</div>
</div>
);
}
}
// Redux-connected wrapper; this is the component the router renders.
const ConnectedAllCampuses = connect(
  mapStateToProps,
  mapDispatchToProps
)(AllCampuses);
export default ConnectedAllCampuses;
<file_sep>import React from 'react';
import { connect } from 'react-redux';
import {
thunkToUpdateACampusCreator,
thunkToGetCampusesCreator,
} from '../reducers/campusesReducer';
import CampusForm from './CampusForm';
// Expose API errors and the current campus list (the list is used for
// duplicate-name checks before updating).
const mapStateToProps = state => {
  return {
    errorMessage: state.errorsReducer.errorMessage,
    campuses: state.campusesReducer.campuses,
  };
};

// Bind the thunks this component actually dispatches.
//
// Fix: the original also mapped `thunkToGetACampusCreator`, but that
// creator is never imported in this file, so invoking the prop would have
// thrown a ReferenceError. The component never uses it, so the dead (and
// broken) mapping is dropped.
const mapDispatchToProps = dispatch => {
  return {
    thunkToUpdateACampusCreator: campusToUpdate =>
      dispatch(thunkToUpdateACampusCreator(campusToUpdate)),
    thunkToGetCampusesCreator: () => dispatch(thunkToGetCampusesCreator()),
  };
};
// UpdateCampus: inline edit form for one campus (rendered by
// SingleCampus, which passes the campus as a prop). Seeds its local form
// state from the prop after refreshing the campus list, and rejects
// renames that would collide with another campus's name.
class UpdateCampus extends React.Component {
  constructor() {
    super();
    // Local form state mirrors the editable campus fields; errorMsg holds
    // the validation or API error shown under the form.
    this.state = {
      id: 0,
      name: '',
      address: '',
      imageUrl: '',
      description: '',
      errorMsg: '',
    };
    this.handleChange = this.handleChange.bind(this);
    this.handleSubmit = this.handleSubmit.bind(this);
    this.checkForDuplicateName = this.checkForDuplicateName.bind(this);
  }
  // Refresh the campus list (needed for the duplicate check), then seed
  // the form from the campus prop.
  async componentDidMount() {
    await this.props.thunkToGetCampusesCreator();
    this.setState({
      id: this.props.campus.id,
      name: this.props.campus.name,
      address: this.props.campus.address,
      imageUrl: this.props.campus.imageUrl,
      description: this.props.campus.description,
    });
  }
  // Generic controlled-input handler keyed by the input's name attribute.
  handleChange(event) {
    this.setState({
      [event.target.name]: event.target.value,
    });
  }
  // True when a DIFFERENT campus already uses the candidate name.
  checkForDuplicateName(campusToUpdate) {
    const newName = campusToUpdate.name;
    const duplicate = this.props.campuses.filter(
      campus => campus.name === newName && campus.id !== campusToUpdate.id
    );
    if (duplicate.length > 0) return true;
    else return false;
  }
  // Validate and dispatch the update.
  // NOTE(review): preventDefault is only called on the duplicate-name
  // branch — on the success path the native form submission (page reload)
  // is allowed to proceed. Possibly intentional (forces a refresh), but
  // it also means the errorMsg set from props may never be seen; confirm.
  async handleSubmit(event) {
    const campusToUpdate = {
      id: this.state.id,
      name: this.state.name,
      address: this.state.address,
      imageUrl: this.state.imageUrl,
      description: this.state.description,
    };
    if (this.checkForDuplicateName(campusToUpdate) === true) {
      event.preventDefault();
      this.setState({ errorMsg: 'Campus with this name already exists.' });
    } else {
      await this.props.thunkToUpdateACampusCreator(campusToUpdate);
      this.setState({ errorMsg: this.props.errorMessage });
    }
  }
  render() {
    return (
      <div>
        <CampusForm
          {...this.state}
          errorMessage={this.state.errorMsg}
          handleChange={this.handleChange}
          handleSubmit={this.handleSubmit}
          buttonName="Update This Campus"
        />
      </div>
    );
  }
}
// Store-connected wrapper for the campus edit form (default export).
const ConnectedUpdateCampus = connect(
  mapStateToProps,
  mapDispatchToProps
)(UpdateCampus);
export default ConnectedUpdateCampus;
<file_sep>import axios from 'axios';
import { initialState } from './index';
import { gotError, resetError } from './errorsReducer';
import { updatedAStudent } from './studentsReducer';
// Action Types — string constants shared by the creators and the reducer below.
const GOT_ALL_CAMPUSES = 'GOT_ALL_CAMPUSES_SUCCESSFULLY';
const GOT_A_CAMPUS = 'GOT_A_CAMPUS_SUCCESSFULLY';
const ADDED_A_CAMPUS = 'ADDED_A_CAMPUS_SUCCESSFULLY';
const DELETED_A_CAMPUS = 'DELETED_A_CAMPUS_SUCCESSFULLY';
const UPDATED_A_CAMPUS = 'UPDATED_A_CAMPUS_SUCCESSFULLY';
// Action Creators — plain objects consumed by campusesReducer below.
const gotAllCampuses = campuses => ({
  type: GOT_ALL_CAMPUSES,
  campuses,
});
const gotACampus = campus => ({
  type: GOT_A_CAMPUS,
  campus,
});
const addedACampus = campus => ({
  type: ADDED_A_CAMPUS,
  campus,
});
// Carries only the id; the reducer filters the campus out of the list.
const deletedACampus = campusId => ({
  type: DELETED_A_CAMPUS,
  campusId,
});
const updatedACampus = campus => ({
  type: UPDATED_A_CAMPUS,
  campus,
});
// Thunk creators: each returns an async action (redux-thunk) that calls the
// REST API, then dispatches the success action plus resetError, or gotError
// with the server's error payload on failure.
export const thunkToGetCampusesCreator = () => async dispatch => {
  try {
    const response = await axios.get('/api/campuses');
    dispatch(gotAllCampuses(response.data));
    dispatch(resetError());
  } catch (error) {
    dispatch(gotError(error.response.data));
  }
};
// Fetch one campus; the API responds with an array, so element 0 is the record.
export const thunkToGetACampusCreator = campusId => async dispatch => {
  try {
    const response = await axios.get(`/api/campuses/${campusId}`);
    dispatch(gotACampus(response.data[0]));
    dispatch(resetError());
  } catch (error) {
    dispatch(gotError(error.response.data));
  }
};
// Create a campus and append the server-created record to the store.
export const thunkToAddACampusCreator = newCampus => async dispatch => {
  try {
    const response = await axios.post('/api/campuses', newCampus);
    dispatch(addedACampus(response.data));
    dispatch(resetError());
  } catch (error) {
    dispatch(gotError(error.response.data));
  }
};
// Delete a campus server-side, then remove it from the store by id.
export const thunkToDeleteACampusCreator = campusToDeleteId => async dispatch => {
  try {
    await axios.delete(`/api/campuses/${campusToDeleteId}`);
    dispatch(deletedACampus(campusToDeleteId));
    dispatch(resetError());
  } catch (error) {
    dispatch(gotError(error.response.data));
  }
};
// Persist an edited campus, then replace the stale record in the store.
export const thunkToUpdateACampusCreator = campusToUpdate => async dispatch => {
  try {
    const response = await axios.put(
      `/api/campuses/${campusToUpdate.id}`,
      campusToUpdate
    );
    dispatch(updatedACampus(response.data));
    dispatch(resetError());
  } catch (error) {
    dispatch(gotError(error.response.data));
  }
};
// Cross-slice thunk: PUT the student (payload already carries the target
// campusId), then re-fetch that campus so both slices stay in sync —
// updatedAStudent feeds studentsReducer, gotACampus refreshes this slice
// (the campus endpoint returns an array, hence data[0]).
export const thunkToAddStudentToCampusCreator = function(studentToAddToCampus) {
  return async function(dispatch) {
    try {
      const studentResponse = await axios.put(
        `/api/students/${studentToAddToCampus.id}`,
        studentToAddToCampus
      );
      const campusResponse = await axios.get(
        `/api/campuses/${studentToAddToCampus.campusId}`
      );
      dispatch(updatedAStudent(studentResponse.data));
      dispatch(gotACampus(campusResponse.data[0]));
      dispatch(resetError());
    } catch (error) {
      dispatch(gotError(error.response.data));
    }
  };
};
// Cross-slice thunk: the caller sends the student with campusId already
// nulled out, so the former campus's id must arrive as a separate `campusId`
// argument to know which campus to re-fetch after the detach.
export const thunkToRemoveStudentFromCampusCreator = function(
  studentToRemoveFromCampus,
  campusId
) {
  return async function(dispatch) {
    try {
      const studentResponse = await axios.put(
        `/api/students/${studentToRemoveFromCampus.id}`,
        studentToRemoveFromCampus
      );
      const campusResponse = await axios.get(`/api/campuses/${campusId}`);
      dispatch(updatedAStudent(studentResponse.data));
      dispatch(gotACampus(campusResponse.data[0]));
      dispatch(resetError());
    } catch (error) {
      dispatch(gotError(error.response.data));
    }
  };
};
// Reducer: pure function producing the next campuses slice for each action;
// unknown actions fall through to the current state unchanged.
function campusesReducer(state = initialState, action) {
  switch (action.type) {
    case GOT_ALL_CAMPUSES: {
      return { ...state, campuses: action.campuses };
    }
    case GOT_A_CAMPUS: {
      return { ...state, campus: action.campus };
    }
    case ADDED_A_CAMPUS: {
      const grown = state.campuses.concat([action.campus]);
      return { ...state, campuses: grown };
    }
    case DELETED_A_CAMPUS: {
      const kept = state.campuses.filter(c => c.id !== action.campusId);
      return { ...state, campuses: kept };
    }
    case UPDATED_A_CAMPUS: {
      const refreshed = state.campuses.map(c =>
        c.id === action.campus.id ? action.campus : c
      );
      return { ...state, campuses: refreshed };
    }
    default:
      return state;
  }
}
export default campusesReducer;
<file_sep>import React from 'react';
import { connect } from 'react-redux';
import { Link } from 'react-router-dom';
import {
thunkToGetAStudentCreator,
thunkToUpdateAStudentCreator,
} from '../reducers/studentsReducer';
import {
thunkToGetCampusesCreator,
thunkToAddStudentToCampusCreator,
thunkToRemoveStudentFromCampusCreator,
} from '../reducers/campusesReducer';
import { List } from './utils';
import ConnectedUpdateStudent from './UpdateStudent';
// Expose the currently-loaded student and the campuses list to the page.
const mapStateToProps = state => ({
  student: state.studentsReducer.student,
  campuses: state.campusesReducer.campuses,
});
// Wire every thunk this page dispatches: load data, update the student, and
// move the student onto / off of a campus.
const mapDispatchToProps = dispatch => ({
  thunkToGetCampusesCreator: () => dispatch(thunkToGetCampusesCreator()),
  thunkToGetAStudentCreator: studentId =>
    dispatch(thunkToGetAStudentCreator(studentId)),
  thunkToUpdateAStudentCreator: studentToUpdate =>
    dispatch(thunkToUpdateAStudentCreator(studentToUpdate)),
  thunkToAddStudentToCampusCreator: studentToAddToCampus =>
    dispatch(thunkToAddStudentToCampusCreator(studentToAddToCampus)),
  thunkToRemoveStudentFromCampusCreator: (studentToRemoveFromCampus, campusId) =>
    dispatch(
      thunkToRemoveStudentFromCampusCreator(studentToRemoveFromCampus, campusId)
    ),
});
// Detail page for one student (route param: studentId). Shows the student's
// info plus either their campus (with a "remove" action) or a campus picker
// (with an "add" action), and toggles an inline ConnectedUpdateStudent form.
class SingleStudent extends React.Component {
  constructor() {
    super();
    // isClicked: whether the inline update form is shown.
    // campusToAddToId: currently selected <option>; overwritten on mount.
    this.state = {
      isClicked: false,
      campusToAddToId: 1,
    };
    this.handleClick = this.handleClick.bind(this);
    this.handleChange = this.handleChange.bind(this);
    this.handleAddStudentToCampus = this.handleAddStudentToCampus.bind(this);
    this.handleRemoveStudentFromCampus = this.handleRemoveStudentFromCampus.bind(
      this
    );
  }
  // Load the routed student and the campus list; default the picker to the
  // first campus so the <select> value matches a real option.
  async componentDidMount() {
    await this.props.thunkToGetAStudentCreator(
      this.props.match.params.studentId
    );
    await this.props.thunkToGetCampusesCreator();
    if (this.props.campuses.length > 0) {
      this.setState({
        campusToAddToId: this.props.campuses[0].id,
      });
    }
  }
  // Reveal the inline update form.
  handleClick(event) {
    event.preventDefault();
    this.setState({ isClicked: true });
  }
  // Controlled-input handler keyed by the input's `name` attribute.
  handleChange(event) {
    this.setState({
      [event.target.name]: event.target.value,
    });
  }
  // Strip nested objects (e.g. the embedded campus) from the student record
  // to build a flat PUT payload, set the chosen campusId, then re-fetch.
  async handleAddStudentToCampus() {
    const studentToAddToCampus = Object.fromEntries(
      Object.entries(this.props.student).filter(
        ([key, value]) => typeof value !== 'object'
      )
    );
    studentToAddToCampus.campusId = this.state.campusToAddToId;
    await this.props.thunkToAddStudentToCampusCreator(studentToAddToCampus);
    await this.props.thunkToGetAStudentCreator(
      this.props.match.params.studentId
    );
  }
  // Same flattening, but null the campusId; the old campus id is passed
  // separately so the thunk can refresh that campus afterwards.
  async handleRemoveStudentFromCampus() {
    const studentToRemoveFromCampus = Object.fromEntries(
      Object.entries(this.props.student).filter(
        ([key, value]) => typeof value !== 'object'
      )
    );
    studentToRemoveFromCampus.campusId = null;
    await this.props.thunkToRemoveStudentFromCampusCreator(
      studentToRemoveFromCampus,
      this.props.student.campusId
    );
    await this.props.thunkToGetAStudentCreator(
      this.props.match.params.studentId
    );
  }
  render() {
    const { student, campuses } = this.props;
    return student !== undefined ? (
      <div id="aStudent">
        <img className="imageBig" src={student.imageUrl} />
        <h2>{student.fullName}</h2>
        <p>Email: {student.email}</p>
        <p>GPA: {student.gpa}</p>
        {student.campus !== undefined && student.campus !== null ? (
          <div>
            <p>This student is registered to the campus:</p>
            <div className="aStudentInfo">
              <div>
                <Link to={`/campuses/${student.campusId}`}>
                  <img className="imageSmall" src={student.campus.imageUrl} />
                </Link>
              </div>
              <div>
                <Link to={`/campuses/${student.campusId}`}>
                  <span className="textImageTiny">{student.campus.name}</span>
                </Link>
              </div>
              <div>
                <button
                  id="removeFrom"
                  type="submit"
                  name="removeStudentFromCampus"
                  onClick={this.handleRemoveStudentFromCampus}
                >
                  Remove From Campus
                </button>
              </div>
            </div>
          </div>
        ) : (
          <div>
            <p>Currently, this student is not registered to a campus.</p>
            <div id="campus-options">
              <div id="Select-campus-option">
                <label htmlFor="Campus-select">Select campus:</label>
                <select
                  id="Campus-select"
                  name="campusToAddToId"
                  value={this.state.campusToAddToId}
                  onChange={this.handleChange}
                >
                  <List
                    forEachOfThese={campuses}
                    doThis={campus => {
                      return (
                        <option key={campus.id} value={campus.id}>
                          {campus.name}
                        </option>
                      );
                    }}
                    unlessEmpty={() => <option>No campuses registered.</option>}
                  />
                </select>
                <button
                  id="addTo"
                  type="submit"
                  name="addStudentToCampus"
                  onClick={this.handleAddStudentToCampus}
                  disabled={campuses.length === 0}
                >
                  Add To Campus
                </button>
              </div>
            </div>
          </div>
        )}
        <div id="aStudentInfoButtons">
          {this.state.isClicked === false ? (
            <button
              id="update"
              type="submit"
              name="updateStudent"
              onClick={this.handleClick}
            >
              Update This Student
            </button>
          ) : null}
          {this.state.isClicked === true ? (
            <ConnectedUpdateStudent student={student} />
          ) : null}
        </div>
      </div>
    ) : (
      <div>
        <p>No student with that Id exists !</p>
        <p>
          Please check the All Students List by clicking "Students" link in the
          navigation bar.
        </p>
      </div>
    );
  }
}
// Store-connected wrapper for the student detail page (default export).
const ConnectedSingleStudent = connect(
  mapStateToProps,
  mapDispatchToProps
)(SingleStudent);
export default ConnectedSingleStudent;
<file_sep># Academy of JavaScript
### Try it out at :
https://academy-of-javascript.herokuapp.com/
## The Premise
This is a RESTful web platform that allows the CTO of the Margaret Hamilton Academy of JavaScript to manage the Students and Campuses of that Academy.
## Utilized
- Express : to handle HTTP requests.
- PostgreSQL : to maintain database of the Students & Campuses data.
- Sequelize : to interface with a PostgreSQL database.
- React : to build the Components that display/modify the Students & Campuses data.
- Redux : to manage all important state (i.e. Students & Campuses data) in the Redux store.
- React-Redux : to connect the React Components that display/modify the Students & Campuses data to the Redux store.
- Redux-Thunk : for handling Redux side effects logic that needs access to the Redux store, including the asynchronous logic to handle all the CRUD (Create, Read, Update & Delete) actions to be performed on the Students & Campuses data.
Add New Campus | Update A Campus
:-------------------------:|:-------------------------:
<img src="media/CampusAddNewAndUpdate_Part1.gif"> | <img src="media/CampusAddNewAndUpdate_Part2.gif">
Add Student To Campus | Remove Student From Campus
:-------------------------:|:-------------------------:
<img src="media/CampusAddRemoveStudents_Part1.gif"> | <img src="media/CampusAddRemoveStudents_Part2.gif">
Add New Student | Update A Student
:-------------------------:|:-------------------------:
<img src="media/StudentAddNewAndUpdate_Part1.gif"> | <img src="media/StudentAddNewAndUpdate_Part2.gif">
Add Student To Campus | Remove Student From Campus
:-------------------------:|:-------------------------:
<img src="media/StudentAddRemoveFromCampus_Part2.gif"> | <img src="media/StudentAddRemoveFromCampus_Part1.gif">
<file_sep>import { combineReducers } from 'redux';
import studentsReducer from './studentsReducer';
import campusesReducer from './campusesReducer';
import errorsReducer from './errorsReducer';
// Initial state shared by the slice reducers: campusesReducer defaults to
// this full object (and presumably the other slices do too — they import it
// from here), so each slice carries all of these keys even though it only
// ever updates its own.
export const initialState = {
  campuses: [],
  campus: {},
  students: [],
  student: {},
  errorMessage: '',
};
// Shorthand keys mean the store state is addressed as state.campusesReducer,
// state.studentsReducer and state.errorsReducer — exactly how the components'
// mapStateToProps functions read it.
const rootReducer = combineReducers({
  campusesReducer,
  studentsReducer,
  errorsReducer,
});
export default rootReducer;
<file_sep>import React from 'react';
import { connect } from 'react-redux';
import {
thunkToUpdateAStudentCreator,
thunkToGetStudentsCreator,
} from '../reducers/studentsReducer';
import StudentForm from './StudentForm';
// Expose the error message and the full students list (for duplicate checks).
const mapStateToProps = state => ({
  errorMessage: state.errorsReducer.errorMessage,
  students: state.studentsReducer.students,
});
// Map the two thunks the student update form dispatches.
const mapDispatchToProps = dispatch => ({
  thunkToUpdateAStudentCreator: studentToUpdate =>
    dispatch(thunkToUpdateAStudentCreator(studentToUpdate)),
  thunkToGetStudentsCreator: () => dispatch(thunkToGetStudentsCreator()),
});
// Controlled form for editing an existing student. The student to edit is
// expected as a `student` prop from the parent (SingleStudent renders
// <ConnectedUpdateStudent student={student} />); `students` and
// `errorMessage` come from mapStateToProps.
class UpdateStudent extends React.Component {
  constructor() {
    super();
    // Form fields mirror the editable student columns; errorMsg is the
    // validation/server message surfaced through StudentForm.
    this.state = {
      id: 0,
      firstName: '',
      lastName: '',
      email: '',
      imageUrl: '',
      gpa: '0.0',
      errorMsg: '',
    };
    this.handleChange = this.handleChange.bind(this);
    this.handleSubmit = this.handleSubmit.bind(this);
    this.checkForDuplicateName = this.checkForDuplicateName.bind(this);
    this.checkForDuplicateEmail = this.checkForDuplicateEmail.bind(this);
  }
  // Load every student (needed for duplicate checks), then seed the form
  // fields from the student being edited.
  async componentDidMount() {
    await this.props.thunkToGetStudentsCreator();
    this.setState({
      id: this.props.student.id,
      firstName: this.props.student.firstName,
      lastName: this.props.student.lastName,
      email: this.props.student.email,
      imageUrl: this.props.student.imageUrl,
      gpa: this.props.student.gpa,
    });
  }
  // Controlled-input handler keyed by the input's `name` attribute.
  handleChange(event) {
    this.setState({
      [event.target.name]: event.target.value,
    });
  }
  // True when a DIFFERENT student already uses the proposed full name.
  // Assumes the API exposes fullName as firstName + ' ' + lastName — TODO
  // confirm against the student model.
  checkForDuplicateName(studentToUpdate) {
    const newFullName =
      studentToUpdate.firstName + ' ' + studentToUpdate.lastName;
    const duplicate = this.props.students.filter(
      student =>
        student.fullName === newFullName && student.id !== studentToUpdate.id
    );
    if (duplicate.length > 0) return true;
    else return false;
  }
  // True when a DIFFERENT student already uses the proposed email address.
  checkForDuplicateEmail(studentToUpdate) {
    const newEmail = studentToUpdate.email;
    const duplicate = this.props.students.filter(
      student => student.email === newEmail && student.id !== studentToUpdate.id
    );
    if (duplicate.length > 0) return true;
    else return false;
  }
  // Client-side duplicate checks first; otherwise dispatch the update thunk
  // and show whatever error message the store holds afterwards.
  async handleSubmit(event) {
    const studentToUpdate = {
      id: this.state.id,
      firstName: this.state.firstName,
      lastName: this.state.lastName,
      email: this.state.email,
      imageUrl: this.state.imageUrl,
      gpa: this.state.gpa,
    };
    if (this.checkForDuplicateName(studentToUpdate) === true) {
      event.preventDefault();
      this.setState({ errorMsg: 'Student with this name already exists.' });
    } else if (this.checkForDuplicateEmail(studentToUpdate) === true) {
      event.preventDefault();
      this.setState({ errorMsg: 'This email address is already used.' });
    } else {
      await this.props.thunkToUpdateAStudentCreator(studentToUpdate);
      this.setState({ errorMsg: this.props.errorMessage });
    }
  }
  render() {
    return (
      <StudentForm
        {...this.state}
        errorMessage={this.state.errorMsg}
        handleChange={this.handleChange}
        handleSubmit={this.handleSubmit}
        buttonName="Update This Student"
      />
    );
  }
}
// Store-connected wrapper for the student edit form (default export).
const ConnectedUpdateStudent = connect(
  mapStateToProps,
  mapDispatchToProps
)(UpdateStudent);
export default ConnectedUpdateStudent;
| 202df2dd9ee3f7d31adb8ab2a34517ae82c2f6a3 | [
"JavaScript",
"Markdown"
] | 14 | JavaScript | nalimaye/academy-of-javascript | a31fb5f9948d176adacc5d096b3754021d4bfb70 | cea3296dab6747e679c2ec37ff122b41bb6f253d |
refs/heads/master | <repo_name>jimmyjayp/pypkgtest<file_sep>/parentpkg/childpkg/__init__.py
"""this is the child package
Import all functions from childpkg modules into one
namespace; parentpkg.childpkg
"""
from .childA import *
from .childB import *
<file_sep>/parentpkg/childpkg/childA.py
def funcA():
print("parentpkg.childpkg.childA.funcA")
<file_sep>/parentpkg/childpkg/childB.py
import parentpkg.parentA
def funcB():
print("parentpkg.childpkg.childB.funcB")
def funcB2():
print("calling parentA funcA")
parentpkg.parentA.funcA()
<file_sep>/bin/testimports.py
#!/usr/bin/env python3
# export PYTHONPATH=.../pypkgtest
import parentpkg.parentA
import parentpkg.parentB
import parentpkg.childpkg
parentpkg.parentA.funcA()
parentpkg.parentB.funcB()
parentpkg.childpkg.funcA()
parentpkg.childpkg.funcB()
parentpkg.childpkg.funcB2()
<file_sep>/parentpkg/parentA.py
def funcA():
print("parentpkg.parentA.funcA")
<file_sep>/parentpkg/__init__.py
"""this is the parent package"""
<file_sep>/parentpkg/parentB.py
def funcB():
print("parentpkg.parentB.funcB")
| 76c905425c52845634fe513ce90a77fbf01bd993 | [
"Python"
] | 7 | Python | jimmyjayp/pypkgtest | b8ddadf5af42e25c2c03183f1fb9e7b177e785c5 | 1b373cc845832c6b8d004e3e1da987f920e5e52e |
refs/heads/master | <file_sep>/*
A linked list is given such that each node contains an additional random pointer which could point to any node in the list or null.
Return a deep copy of the list.
*/
/**
* Definition for singly-linked list with a random pointer.
* struct RandomListNode {
* int label;
* RandomListNode *next, *random;
* RandomListNode(int x) : label(x), next(NULL), random(NULL) {}
* };
*/
class Solution {
public:
RandomListNode *copyRandomList(RandomListNode *head) {
return copyRandomList2(head);
}
//O(1) space, very tricky
RandomListNode *copyRandomList2(RandomListNode *head) {
//stage1
for (RandomListNode *cur = head; cur; cur = cur->next->next) {
RandomListNode *newNode = new RandomListNode(cur->label);
newNode->next = cur->next;
cur->next = newNode;
}
//stage2
for (RandomListNode *cur = head; cur; cur = cur->next->next)
if (cur->random)
cur->next->random = cur->random->next;
//stage3
RandomListNode dummy(0), *curNew = &dummy;
for (RandomListNode *cur = head; cur; cur = cur->next) {
curNew->next = cur->next;
curNew = curNew->next;
cur->next = cur->next->next;
}
return dummy.next;
}
//O(N) space
RandomListNode *copyRandomList1(RandomListNode *head) {
if(!head)
return NULL;
unordered_map<RandomListNode* , RandomListNode*> cache;
cache[NULL]=NULL;
RandomListNode* cur = head;
while(cur){
RandomListNode* copy = new RandomListNode(cur->label);
cache[cur]=copy;
cur=cur->next;
}
cur = head;
while(cur){
RandomListNode* copy = cache[cur];
copy->next = cache[cur->next];
copy->random = cache[cur->random];
cur=cur->next;
}
return cache[head];
}
};<file_sep>/*
Reverse digits of an integer.
Example1: x = 123, return 321
Example2: x = -123, return -321
*/
class Solution {
public:
int reverse(int x) {
bool neg = false;
if(x==0)
return 0;
if(x<0){
neg = true;
x *= -1;
}
int y = 0;
while(x){
y *= 10;
y += x%10;
x /= 10;
}
return neg?-y:y;
}
};<file_sep>/*
Given a singly linked list where elements are sorted in ascending order, convert it to a height balanced BST.
*/
/**
* Definition for singly-linked list.
* struct ListNode {
* int val;
* ListNode *next;
* ListNode(int x) : val(x), next(NULL) {}
* };
*/
/**
* Definition for binary tree
* struct TreeNode {
* int val;
* TreeNode *left;
* TreeNode *right;
* TreeNode(int x) : val(x), left(NULL), right(NULL) {}
* };
*/
class Solution {
public:
//O(N), not O(NlgN)
TreeNode *sortedListToBST(ListNode *head) {
if(head==NULL){
return NULL;
}
int len = 0;
ListNode* cur = head;
while(cur){
cur = cur->next;
++len;
}
TreeNode* root = buildBST(0,len-1,head);
return root;
}
TreeNode* buildBST(int b ,int e, ListNode*& head){
if(b<=e){
int m = b + (e-b)/2;
TreeNode* n = new TreeNode(0);
n->left = buildBST(b,m-1,head);
n->val=head->val;
head=head->next;
n->right = buildBST(m+1,e,head);
return n;
}
return NULL;
}
};
<file_sep>/*
Given an index k, return the kth row of the Pascal's triangle.
For example, given k = 3,
Return [1,3,3,1].
Note:
Could you optimize your algorithm to use only O(k) extra space?
*/
class Solution {
public:
vector<int> getRow(int rowIndex) {
vector<int> row(rowIndex+1,1);
for(int r = 2; r<=rowIndex; ++r){
for(int c = r-1; c >= 1 ; --c){
row[c] += row[c-1];
}
}
return row;
}
};<file_sep>/*
Given n points on a 2D plane, find the maximum number of points that lie on the same straight line.
*/
/**
* Definition for a point.
* struct Point {
* int x;
* int y;
* Point() : x(0), y(0) {}
* Point(int a, int b) : x(a), y(b) {}
* };
*/
class Solution {
public:
int maxPoints(vector<Point> &points) {
if(points.size()<=2)
return points.size();
int maxPoints = 2;
for(int i = 0; i < points.size()-1; ++i){
Point& src = points[i];
//slope <-> points except src
unordered_map<double, int> cache;
cache.clear();
//number of points same as src
int duplicates = 1;
for(int j = i + 1 ; j < points.size(); ++j){
Point& dest = points[j];
if(dest.x==src.x&&dest.y==src.y){
++duplicates;
continue;
}
double slope = 0.0;
if(src.x==dest.x)
slope = (double)INT_MAX;
else
slope = (double)(dest.y-src.y)/(double)(dest.x-src.x);
if(cache.count(slope)==0)
cache[slope]=1;
else
cache[slope]++;
}
//update local max
int count = 0;
unordered_map<double,int >::iterator iter = cache.begin();
while(iter != cache.end()){
count = max(iter->second,count);
++iter;
}
//update global max
maxPoints = max(maxPoints, count+duplicates );
}
return maxPoints;
}
};<file_sep>/*
Generate Parentheses
Given n pairs of parentheses, write a function to generate all combinations of well-formed parentheses.
For example, given n = 3, a solution set is:
"((()))", "(()())", "(())()", "()(())", "()()()"
*/
class Solution {
public:
void generateParenthesis(int pre, int post , string s, vector<string>& v){
if(pre>post||pre<0||post<0){
return;
}
if(pre==0&&post==0){
v.push_back(s);
return;
}
generateParenthesis(pre-1,post,s+"(",v);
generateParenthesis(pre,post-1,s+")",v);
}
vector<string> generateParenthesis(int n) {
vector<string> v;
if(n==0)
return v;
string s;
generateParenthesis(n,n,s,v);
return v;
}
};<file_sep>/*
Clone an undirected graph. Each node in the graph contains a label and a list of its neighbors.
OJ's undirected graph serialization:
Nodes are labeled uniquely.
We use # as a separator for each node, and , as a separator for node label and each neighbor of the node.
As an example, consider the serialized graph {0,1,2#1,2#2,2}.
The graph has a total of three nodes, and therefore contains three parts as separated by #.
First node is labeled as 0. Connect node 0 to both nodes 1 and 2.
Second node is labeled as 1. Connect node 1 to node 2.
Third node is labeled as 2. Connect node 2 to node 2 (itself), thus forming a self-cycle.
Visually, the graph looks like the following:
1
/ \
/ \
0 --- 2
/ \
\_/
*/
/**
* Definition for undirected graph.
* struct UndirectedGraphNode {
* int label;
* vector<UndirectedGraphNode *> neighbors;
* UndirectedGraphNode(int x) : label(x) {};
* };
*/
class Solution {
public:
UndirectedGraphNode *cloneGraph(UndirectedGraphNode *node) {
if(node==NULL){
return NULL;
}
unordered_map<UndirectedGraphNode*, UndirectedGraphNode*> cache;
cache[node] = new UndirectedGraphNode(node->label);
queue<UndirectedGraphNode*> q;
q.push(node);
while(!q.empty()){
UndirectedGraphNode* n = q.front();
q.pop();
for(int i = 0; i < n->neighbors.size(); ++i ){
UndirectedGraphNode* nb = n->neighbors[i];
if( !cache.count(nb) ){
cache[nb] = new UndirectedGraphNode(nb->label);
(cache[n]->neighbors).push_back(cache[nb]);
q.push(nb);
}
else{
(cache[n]->neighbors).push_back(cache[nb]);
}
}
}
return cache[node];
}
};<file_sep>/*
Implement strStr().
Returns a pointer to the first occurrence of needle in haystack, or null if needle is not part of haystack.
*/
class Solution {
public:
char * strStr(char * haystack, char * needle) {
if (!haystack || !needle) return haystack;
int n = strlen(haystack);
int m = strlen(needle);
if (n < m) return NULL;
if (m == 0) return haystack;
return strStr3(haystack, n, needle, m);
}
// brute force, takes O(n*m) time
char *strStr1(char *haystack, int n, char *needle, int m) {
int i = 0;
while (i < n-m+1) {
int j = 0;
while (j < m && haystack[i] == needle[j]) {
i++, j++;
}
if (j == m) return haystack+(i-j);
i = i-j+1;
}
return NULL;
}
/*
// Rabin-Karp (RK), takes O(n+m) times, but O(n*m) worst case
char * strStr2(char * haystack, int n, char * needle, int m) {
int hn = 0;
for (int i = 0; i < m; i++) hn = mod(hn*B+needle[i], M);
int hh = 0;
for (int i = 0; i < m; i++) hh = mod(hh*B+haystack[i], M);
if (hh == hn) return haystack;
int E = 1;
for (int i = 1; i < m; i++) E = mod(E*B, M);
for (int i = m; i < n; i++) {
hh = mod(hh-mod(haystack[i-m]*E, M), M);
hh = mod(hh*B+haystack[i], M);
if (hh == hn) {
int j = 0;
while (j < m && haystack[i+j] == needle[j]) j++;
return haystack+i-m+1;
}
}
return NULL;
}
int mod(int a, int b) {
return (a%b+b)%b;
}
*/
// Knuth-Morris-Pratt Algorithm (KMP), takes O(n+m)
char *strStr(char *haystack, char *needle) {
if (haystack == NULL || needle == NULL) return NULL;
int N = strlen(haystack);
int M = strlen(needle);
if(M==0) return haystack;
vector<int> overlay(M,-1);
//initialize overlay array
for(int i = 1; i < M; ++i){
int pre = overlay[i-1];
while(pre>=0&&needle[pre+1]!=needle[i])
pre=overlay[pre];
if(needle[pre+1]==needle[i]) overlay[i]=pre+1;
}
//search
int i = 0, j = 0;
while(i<N){
if(haystack[i]==needle[j]){
++i; ++j;
if(j==M) return haystack+i-M;
}
else{
if(j==0) ++i;
else j = overlay[j-1] + 1;
}
}
return NULL;
}
};<file_sep>/*
Text Justification
Given an array of words and a length L, format the text such that each line has exactly L characters and is fully (left and right) justified.
You should pack your words in a greedy approach; that is, pack as many words as you can in each line. Pad extra spaces ' ' when necessary so that each line has exactly L characters.
Extra spaces between words should be distributed as evenly as possible. If the number of spaces on a line do not divide evenly between words, the empty slots on the left will be assigned more spaces than the slots on the right.
For the last line of text, it should be left justified and no extra space is inserted between words.
For example,
words: ["This", "is", "an", "example", "of", "text", "justification."]
L: 16.
Return the formatted lines as:
[
"This is an",
"example of text",
"justification. "
]
Note: Each word is guaranteed not to exceed L in length.
click to show corner cases.
Corner Cases:
A line other than the last line might contain only one word. What should you do in this case?
In this case, that line should be left-justified.
*/
//EPI 12.13 pretty printing, similar but different and more difficult
class Solution {
public:
string formatLine(const vector<string>& words,int L){
string line(words[0]);
if(words.size()==1){//only one word
line += string(L-line.size(),' ');
return line;
}
//distribute spaces
int chars = 0;
for(int i = 0; i < words.size(); ++i){
chars += words[i].size();
}
int spaces = L-chars;
int avg = spaces/(words.size()-1); //average extra spaces
int head = spaces - (words.size()-1)*avg; //more heading spaces on left
for(int i = 1; i < words.size(); ++i){
if(head-->0) line += " ";
line += string(avg,' ') + words[i];
}
return line;
}
vector<string> fullJustify(vector<string> &words, int L) {
vector<string> v; //return set
if(words.size()==0)
return v;
vector<string> curLine(1,words[0]);
int curLen = words[0].size();
for(int i = 1; i < words.size(); ++i){
string& word = words[i];
if(curLen + 1 + word.size()<=L){//fits into current line;
curLine.push_back(word);
curLen = curLen + 1 + word.size();
}
else{ //new line
v.push_back(formatLine(curLine,L));
curLine.clear();
curLine.push_back(word);
curLen = word.size();
}
}
string line(curLine[0]); //last line, no extra space
for(int i = 1 ; i < curLine.size(); ++i)
line += " " + curLine[i];
line += string(L-line.size(),' ');
v.push_back(line);
return v;
}
};<file_sep>/*
Given a sorted linked list, delete all nodes that have duplicate numbers, leaving only distinct numbers from the original list.
For example,
Given 1->2->3->3->4->4->5, return 1->2->5.
Given 1->1->1->2->3, return 2->3.
*/
/**
* Definition for singly-linked list.
* struct ListNode {
* int val;
* ListNode *next;
* ListNode(int x) : val(x), next(NULL) {}
* };
*/
class Solution {
public:
ListNode *deleteDuplicates(ListNode *head) {
if(head==NULL)
return NULL;
map<int,int> cache;
ListNode* cur = head;
while(cur){
cache[cur->val]++;
cur=cur->next;
}
for(map<int,int>::iterator iter = cache.begin() ; iter != cache.end(); ){
if(iter->second>1){
cache.erase(iter++);
}
else{
++iter;
}
}
ListNode* h = NULL;
ListNode* t = NULL;
for(map<int,int>::iterator iter = cache.begin() ; iter != cache.end(); ++iter){
ListNode* n = new ListNode(iter->first);
if(!h){
h = n;
t = n;
}
else{
t->next = n;
t = n;
}
}
return h;
}
};
//O(1) space
//iterative
/**
* Definition for singly-linked list.
* struct ListNode {
* int val;
* ListNode *next;
* ListNode(int x) : val(x), next(NULL) {}
* };
*/
/*
class Solution {
public:
ListNode *deleteDuplicates(ListNode *head) {
return deleteDuplicates_1(head);
}
ListNode *deleteDuplicates_1(ListNode *head) {
ListNode dummy(0), *cur = &dummy;
dummy.next = head;
while (cur->next)
{
ListNode *node = cur->next;
int val = node->val;
if (!node->next || node->next->val != val) {
cur = cur->next;
continue;
}
while (node && node->val == val) {
ListNode *del = node;
node = node->next;
delete del;
}
cur->next = node;
}
return dummy.next;
}
}
*/
//recursive
/*
ListNode *deleteDuplicates(ListNode *head) {
if (!head || !(head->next) ) return head;
if ( head->val != head->next->val) {
head->next = deleteDuplicates(head->next);
return head;
}
int v = head->val;
ListNode* next = head;
while(next&&next->val==v){
ListNode* n = next;
next = next->next;
delete n;
}
return deleteDuplicates(next);
}
*/<file_sep>/*
Given a collection of intervals, merge all overlapping intervals.
For example,
Given [1,3],[2,6],[8,10],[15,18],
return [1,6],[8,10],[15,18].
*/
/**
* Definition for an interval.
* struct Interval {
* int start;
* int end;
* Interval() : start(0), end(0) {}
* Interval(int s, int e) : start(s), end(e) {}
* };
*/
class Solution {
public:
struct MyComp{
bool operator()(const Interval& i1, const Interval& i2){
return i1.start<i2.start;
}
};
vector<Interval> merge(vector<Interval> &intervals) {
if(intervals.size()==0||intervals.size()==1)
return intervals;
sort(intervals.begin(),intervals.end(),MyComp());
vector<Interval> v;
Interval curI = intervals[0];
int index = 1;
while(index<intervals.size()){
Interval newI = intervals[index];
if(newI.start<=curI.end){
curI.end = max(curI.end, newI.end);
}
else{
v.push_back(curI);
curI = newI;
}
++index;
}
v.push_back(curI);
return v;
}
};
<file_sep>/*
There are N children standing in a line. Each child is assigned a rating value.
You are giving candies to these children subjected to the following requirements:
Each child must have at least one candy.
Children with a higher rating get more candies than their neighbors.
What is the minimum candies you must give?
*/
class Solution {
public:
int candy(vector<int> &ratings) {
vector<int> gives(ratings.size(),1);
for(int i = 1; i < ratings.size(); ++i){
if(ratings[i]>ratings[i-1]){
gives[i] = max(gives[i],gives[i-1]+1);
}
}
for(int i = ratings.size() - 2; i >=0 ; --i){
if(ratings[i]>ratings[i+1]){
gives[i] = max(gives[i],gives[i+1]+1);
}
}
int total = 0;
for(int i = 0; i < ratings.size(); ++i){
total += gives[i];
}
return total;
}
};<file_sep>/*
Given s1, s2, s3, find whether s3 is formed by the interleaving of s1 and s2.
For example,
Given:
s1 = "aabcc",
s2 = "dbbca",
When s3 = "aadbbcbcac", return true.
When s3 = "aadbbbaccc", return false.
*/
class Solution {
public:
//reduce exponential to polynomial
//dp1: top-down recursion memoizing only FAILING (s1,s2,s3) triples.
unordered_set<string> bad; // failed subproblems, keyed "s1#_#s2#_#s3"
bool isInterleave(string s1, string s2, string s3) {
static string BAD("#_#"); // separator so distinct triples cannot collide
if(s1.size()+s2.size()!=s3.size())
return false;
if(s1=="") return s2==s3;
if(s2=="") return s1==s3;
if(bad.count(s1+BAD+s2+BAD+s3))
return false;
// Consume the next char of s3 from whichever source string matches.
if(s1[0]==s3[0] && isInterleave(s1.substr(1),s2,s3.substr(1)) )
return true;
if(s2[0]==s3[0] && isInterleave(s1,s2.substr(1),s3.substr(1)) )
return true;
bad.insert(s1+BAD+s2+BAD+s3); // remember the dead end
return false;
}
//dp2: bottom-up table. dp[i][j] == "s3[0..i+j) is an interleaving of
//s1[0..i) and s2[0..j)". O(M*N) time and space.
bool isInterleave2(string & s1, string & s2, string & s3) {
int M = s1.size(), N = s2.size();
if (M + N != s3.size()) return false;
vector<vector<bool> > dp(M + 1, vector<bool>(N + 1, 0));
for (int i = 0; i <= M; i++) {
for (int j = 0; j <= N; j++) {
if (i == 0 && j == 0) dp[i][j] = true;
else if (i == 0) dp[i][j] = (s2[j - 1] == s3[j - 1]) && dp[i][j - 1];
else if (j == 0) dp[i][j] = (s1[i - 1] == s3[i - 1]) && dp[i - 1][j];
else dp[i][j] = ((s2[j - 1] == s3[i + j - 1]) && dp[i][j - 1]) || ((s1[i - 1] == s3[i + j - 1]) && dp[i - 1][j]);
}
}
return dp[M][N];
}
};
<file_sep>/*
Follow up for "Remove Duplicates":
What if duplicates are allowed at most twice?
For example,
Given sorted array A = [1,1,1,2,2,3],
Your function should return length = 5, and A is now [1,1,2,2,3].
*/
//duplicates are allowed at most twice
class Solution {
public:
    // In-place dedup of sorted array A, keeping at most two copies of each
    // value. Returns the new length; A[0..len) holds the compacted result.
    int removeDuplicates(int A[], int n) {
        int len = 0;
        for (int i = 0; i < n; ++i) {
            // Accept A[i] unless it would become a third copy of the same value.
            if (len < 2 || A[len - 2] != A[i])
                A[len++] = A[i];
        }
        return len;
    }
};<file_sep>/*
Given n, generate all structurally unique BST's (binary search trees) that store values 1...n.
For example,
Given n = 3, your program should return all 5 unique BST's shown below.
1 3 3 2 1
\ / / / \ \
3 2 1 1 3 2
/ / \ \
2 1 2 3
confused what "{1,#,2,3}" means? > read more on how binary tree is serialized on OJ.
OJ's Binary Tree Serialization:
The serialization of a binary tree follows a level order traversal, where '#' signifies a path terminator where no node exists below.
Here's an example:
1
/ \
2 3
/
4
\
5
The above binary tree is serialized as "{1,2,3,#,#,4,#,#,5}".
*/
/**
* Definition for binary tree
* struct TreeNode {
* int val;
* TreeNode *left;
* TreeNode *right;
* TreeNode(int x) : val(x), left(NULL), right(NULL) {}
* };
*/
class Solution {
public:
//unordered_map<pair<int,int>, vector<TreeNode*> > cache;
vector<TreeNode *> generateTrees(int b, int e){
vector<TreeNode *> v;
if(b>e){
v.push_back(NULL);
return v;
}
if(b==e){
TreeNode* n = new TreeNode(b);
v.push_back(n);
return v;
}
for(int m = b; m <= e; ++m){
vector<TreeNode *> left = generateTrees(b,m-1);
vector<TreeNode *> right = generateTrees(m+1,e);
for(int i = 0; i< left.size(); ++i){
for(int j =0 ;j<right.size();++j){
TreeNode* n = new TreeNode(m);
n->left = left[i];
n->right = right[j];
v.push_back(n);
}
}
}
return v;
}
vector<TreeNode *> generateTrees(int n) {
return generateTrees(1,n);
}
};<file_sep>/*
Given n non-negative integers representing an elevation map where the width of each bar is 1, compute how much water it is able to trap after raining.
For example,
Given [0,1,0,2,1,0,1,3,2,1,2,1], return 6.
The above elevation map is represented by array [0,1,0,2,1,0,1,3,2,1,2,1]. In this case, 6 units of rain water (blue section) are being trapped. Thanks Marcos for contributing this image!
*/
class Solution {
public:
    // Trapped rain water: the water above bar i is
    // min(maxLeft[i], maxRight[i]) - A[i]. O(n) time, O(n) extra space.
    // Returns 0 when n < 3 (no bar can hold water).
    int trap(int A[], int n) {
        if (n < 3) return 0;
        vector<int> left(n, 0);   // left[i]  = max height in A[0..i]
        vector<int> right(n, 0);  // right[i] = max height in A[i..n-1]
        int high = 0;
        for (int i = 0; i < n; ++i) {
            high = std::max(high, A[i]);
            left[i] = high;
        }
        high = 0;
        // BUG FIX: the loop condition was `i >- 0` (parsed as `i > -0`),
        // a typo for `i >= 0`, which silently skipped right[0].
        for (int i = n - 1; i >= 0; --i) {
            high = std::max(high, A[i]);
            right[i] = high;
        }
        int water = 0;
        for (int i = 1; i < n - 1; ++i) {
            water += min(left[i], right[i]) - A[i];
        }
        return water;
    }
};<file_sep>/*
Given two words (start and end), and a dictionary, find all shortest transformation sequence(s) from start to end, such that:
Only one letter can be changed at a time
Each intermediate word must exist in the dictionary
For example,
Given:
start = "hit"
end = "cog"
dict = ["hot","dot","dog","lot","log"]
Return
[
["hit","hot","dot","dog","cog"],
["hit","hot","lot","log","cog"]
]
Note:
All words have the same length.
All words contain only lowercase alphabetic characters.
*/
//BFS to generate the graph then DFS to find pathes
class Solution {
public:
// All words in dict that differ from s by exactly one letter (s excluded).
vector<string > getNeighbors(const string& s, unordered_set<string> &dict){
vector<string > neighbors;
string start(s);
for(int i = 0; i < start.size(); ++i){
for(int j = 'a' ; j <= 'z' ; ++j){
start[i] = j;
if(start!=s && dict.count(start) > 0){
neighbors.push_back(start);
}
}
start = s; // undo the mutation before trying the next position
}
return neighbors;
}
vector<vector<string> > res; // every shortest transformation sequence
// BFS builds a DAG of edges (word -> next word) restricted to shortest
// levels, stopping at the level containing `end`; DFS over that DAG then
// enumerates every shortest path.
vector<vector<string>> findLadders(string start, string end, unordered_set<string> &dict) {
unordered_map<string, vector<string> > graph;//build graph from start to end
unordered_set<string> visited; //track visited
unordered_set<string> curr, prev; // bfs levels
prev.insert(start);
visited.insert(start);
//BFS to build graph
while(!prev.empty()){
//mark prev visited
for(unordered_set<string>::iterator iter = prev.begin(); iter != prev.end(); ++iter){
visited.insert(*iter);
}
//get curr level
for(unordered_set<string>::iterator iter = prev.begin(); iter != prev.end(); ++iter){
const string& preStr = *iter;
vector<string> neighbors = getNeighbors(preStr,dict);
for(int i = 0; i < neighbors.size(); ++i){
string& curStr = neighbors[i];
if(visited.count(curStr)==0){ // not visited in an EARLIER level
curr.insert(curStr);
graph[preStr].push_back(curStr);
//visited.insert(curStr);//Don't mark visited here, otherwise would block other paths
}
}
}
if(curr.size()==0) return res; //not found
if(curr.count(end)>0) break; //found
prev = curr;
curr.clear();
}
//DFS to find all paths
vector<string> path;
getPath(start, end, graph, path);
return res;
}
// DFS over the BFS DAG, collecting every start->end path into res.
void getPath(string& start, string& end, unordered_map<string,vector<string> >& graph, vector<string> & path) {
path.push_back(start);
if (start == end) {
res.push_back(vector<string>(path.begin(), path.end()));
}
else {
vector<string>& childs = graph[start];
for (int i = 0; i < childs.size(); ++i ) {
getPath(childs[i], end, graph, path);
}
}
path.pop_back(); // backtrack
}
};
<file_sep>/*
Given a singly linked list L: L0→L1→…→Ln-1→Ln,
reorder it to: L0→Ln→L1→Ln-1→L2→Ln-2→…
You must do this in-place without altering the nodes' values.
For example,
Given {1,2,3,4}, reorder it to {1,4,2,3}.
*/
/**
* Definition for singly-linked list.
* struct ListNode {
* int val;
* ListNode *next;
* ListNode(int x) : val(x), next(NULL) {}
* };
*/
class Solution {
public:
// Recursively reverse a singly linked list; returns the new head.
ListNode* reverse(ListNode* head){
if(!head||!(head->next)){
return head;
}
ListNode* newHead = reverse(head->next);
head->next->next = head;
head->next = NULL;
return newHead;
}
// Alternately take one node from each list (the recursion swaps roles).
ListNode* interleave(ListNode* head, ListNode* head2){
if(!head){
return head2;
}
if(!head2){
return head;
}
ListNode* newHead = head;
head = head->next;
newHead->next = interleave(head2,head);
return newHead;
}
// Reorder L0->L1->...->Ln into L0->Ln->L1->Ln-1->... in place:
// split at the middle, reverse the second half, then interleave.
void reorderList(ListNode *head) {
if(!head||!(head->next))
return;
ListNode* slow = head;
ListNode* fast = head->next;
while(fast&&fast->next){
fast = fast->next->next;
slow = slow ->next;
}
ListNode* head2 = slow->next; // second half starts after the middle
slow->next = NULL;
//reverse head2;
head2 = reverse(head2);
//interleave head head2
head=interleave(head,head2);
}
};
<file_sep>/*
Given two integers n and k, return all possible combinations of k numbers out of 1 ... n.
For example,
If n = 4 and k = 2, a solution is:
[
[2,4],
[3,4],
[2,3],
[1,2],
[1,3],
[1,4],
]
*/
class Solution {
public:
// All k-element combinations of the values 1..n.
vector<vector<int> > combine(int n, int k) {
vector<vector<int> > vv;
if(n==0||k==0||k>n)
return vv;
vector<int> v;
for(int i = 1; i <= n ; ++i){
v.push_back(i);
}
vector<int> cur;
combine2(&vv,cur,0,v,k);
return vv;
}
//logic: escape all previous and choosing one
void combine(vector<vector<int> >* vv, vector<int>& cur, int index, vector<int>& v, int k){
if(cur.size()==k){
vv->push_back(cur);
return;
}
for(int i = index; i < v.size(); ++i ){
cur.push_back(v[i]);
combine(vv,cur,i+1,v,k);
cur.pop_back();
}
}
//logic: either choose myself or escape myself
// NOTE(review): both recursive calls below go to combine(), not combine2();
// the result is still every k-combination exactly once, but confirm the
// mixed recursion was intentional.
void combine2(vector<vector<int> >* vv, vector<int>& cur, int index, vector<int>& v, int k){
if(cur.size()==k){
vv->push_back(cur);
return;
}
if(index >= v.size())
return;
cur.push_back(v[index]);
combine(vv,cur,index+1,v,k);
cur.pop_back();
combine(vv,cur,index+1,v,k);
}
// Next larger integer with the same number of set bits (Gosper's hack).
int nextSameBits(int bits){
int firstBit = bits&(~(bits-1)); // isolate the lowest set bit
int big = firstBit + bits; // flip the low run's top bit
int remains = ((big^bits)>>2) / firstBit; // re-pack remaining ones at the bottom
return big + remains;
}
//iteratively by using bits manipulation
// Enumerate k-subsets of cur by stepping a k-bit mask from lowest to
// highest placement. NOTE(review): assumes cur.size() >= k; this helper
// is not called by combine() above.
void combine3(vector<vector<int> >* vv , vector<int>& cur,int k){
int begin = 0;
int end = 0;
for(int i = 0 ; i < k ; ++i ) begin |= 1<<i;
end = begin << (cur.size()-k) ;
while(begin <= end){
int bits = begin;
vector<int> v;
while(bits){
int i = 0;
while( 1<<i != (bits&(~(bits-1))) ) ++i; // index of the lowest set bit
bits = bits & (bits-1); // clear it
v.push_back(cur[i]);
}
vv->push_back(v);
begin = nextSameBits(begin);
}
}
};
<file_sep>/*
Given a string containing only digits, restore it by returning all possible valid IP address combinations.
For example:
Given "25525511135",
return ["255.255.11.135", "255.255.111.35"]. (Order does not matter)
*/
class Solution {
public:
    // Return every valid dotted-quad IP address formed from digit string s.
    vector<string> restoreIpAddresses(string s) {
        vector<string> results;
        if (s.size() < 4 || s.size() > 12)   // an IP uses 4..12 digits
            return results;
        vector<string> octets;
        restoreIpAddresses(results, s, octets);
        return results;
    }
    // Backtracking: peel 1-3 leading digits of s as the next octet.
    void restoreIpAddresses(vector<string>& v, string s, vector<string>& cur){
        if (cur.size() == 4) {
            if (s.empty()) {                 // consumed every digit: record it
                string ip = cur[0];
                for (int i = 1; i < 4; ++i)
                    ip += "." + cur[i];
                v.push_back(ip);
            }
            return;
        }
        int maxLen = std::min(3, (int)s.size());
        for (int take = 1; take <= maxLen; ++take) {
            string octet = s.substr(0, take);
            if (!isValid(octet)) continue;
            cur.push_back(octet);
            restoreIpAddresses(v, s.substr(take), cur);
            cur.pop_back();
        }
    }
    // An octet is valid when it is 0-255 with no leading zero.
    bool isValid(string& s){
        switch (s.size()) {
            case 1: return isdigit((unsigned char)s[0]) != 0;
            case 2: return atoi(s.c_str()) >= 10;
            case 3: { int n = atoi(s.c_str()); return n >= 100 && n <= 255; }
            default: return false;
        }
    }
};
<file_sep>/*
Given a binary tree, find the maximum path sum.
The path may start and end at any node in the tree.
For example:
Given the below binary tree,
1
/ \
2 3
Return 6.
*/
/**
* Definition for binary tree
* struct TreeNode {
* int val;
* TreeNode *left;
* TreeNode *right;
* TreeNode(int x) : val(x), left(NULL), right(NULL) {}
* };
*/
class Solution {
public:
//maxPath,maxHeight
// Val.first  = best path sum found anywhere inside this subtree
// Val.second = best sum of a downward path starting at the subtree root
typedef pair<int,int> Val;
// Post-order combine: a child's downward sum is added to the path through
// this node only when positive (a negative extension can only hurt).
int maxPathsumHelper(TreeNode* root, Val& v){
int maxChildPath = INT_MIN; // best path fully inside a child subtree
int maxRootPath = root->val; // best path passing through this node
int maxHeight = root->val;
if(root->left){
Val leftV;
maxPathsumHelper(root->left,leftV);
if(leftV.second>0)
maxRootPath += leftV.second;
maxChildPath = max(maxChildPath, leftV.first);
maxHeight = max(maxHeight,root->val+leftV.second);
}
if(root->right){
Val rightV;
maxPathsumHelper(root->right,rightV);
if(rightV.second>0)
maxRootPath += rightV.second;
maxChildPath = max(maxChildPath,rightV.first);
maxHeight = max(maxHeight,root->val+rightV.second);
}
v.first = max(maxChildPath,maxRootPath);
v.second = maxHeight;
return v.first;
}
// Maximum path sum; the path may start and end at any node.
int maxPathSum(TreeNode *root) {
if(!root)
return 0;
Val v;
maxPathsumHelper(root,v);
return v.first;
}
};
<file_sep>/*
Given an unsorted array of integers, find the length of the longest consecutive elements sequence.
For example,
Given [100, 4, 200, 1, 3, 2],
The longest consecutive elements sequence is [1, 2, 3, 4]. Return its length: 4.
Your algorithm should run in O(n) complexity.
*/
class Solution {
public:
    // Length of the longest run of consecutive integers, O(n) expected:
    // each value enters and leaves the hash set exactly once.
    int longestConsecutive(vector<int> &num) {
        if (num.size() <= 1)
            return num.size();
        unordered_set<int> pool(num.begin(), num.end());
        int best = 0;
        while (!pool.empty()) {
            int seed = *pool.begin();
            pool.erase(seed);
            int lo = seed - 1, hi = seed + 1;
            while (pool.erase(lo)) --lo;   // extend the run downward
            while (pool.erase(hi)) ++hi;   // extend the run upward
            best = std::max(best, hi - lo - 1);
        }
        return best;
    }
};<file_sep>/*
Validate if a given string is numeric.
Some examples:
"0" => true
" 0.1 " => true
"abc" => false
"1 a" => false
"2e10" => true
Note: It is intended for the problem statement to be ambiguous. You should gather all requirements up front before implementing one.
*/
class Solution {
public:
// Single-pass validator: optional surrounding spaces, optional sign,
// digits with at most one '.', optional 'e' exponent that must have
// digits both before and after it.
bool isNumber(const char* s) {
// trim leading/trailing spaces
while (*s != '\0' && isspace(*s)) s++;
if (*s == '\0') return false;
const char *e = s + strlen(s) - 1;
while (e > s && isspace(*e)) e--;
// skip leading +/-
if (*s == '+' || *s == '-') s++;
bool num = false; // is a digit
bool dot = false; // is a '.'
bool exp = false; // is a 'e'
while (s != e + 1) {
if (*s >= '0' && *s <= '9') {
num = true;
}
else if (*s == '.') {
if(exp || dot) return false; // no '.' after 'e', and at most one '.'
dot = true;
}
else if (*s == 'e') {
if(exp || num == false) return false; // one 'e', only after digits
exp = true;
num = false; // the exponent must supply its own digits
}
else if (*s == '+' || *s == '-') {
if (*(s-1) != 'e') return false; // interior sign only directly after 'e'
}
else {
return false;
}
s++;
}
return num;
}
//state machine
// Table-driven DFA over actions {space, sign, digit, dot, exponent};
// state 0 is the reject sink. Accepting states are listed in validStates.
bool isNumber1(const char *s) {
if(*s==0) return false;
enum Action {INVALID=0, SPACE=1, SIGN=2, DIGIT=3, DOT=4, EXPONENT=5};
const int N = 6;
//[state][action]
int transTable[][N] =
{ /* 0 1 2 3 4 5 */
0, 1, 2, 3, 4, 0, // 0: INVALID
0, 1, 2, 3, 4, 0, // 1: SPACE
0, 0, 0, 3, 4, 0, // 2: SIGN
0, 6, 0, 3, 7, 5, // 3: DIGIT
0, 0, 0, 7, 0, 0, // 4: DOT
0, 0, 2, 8, 0, 0, // 5: EXPONENT
0, 6, 0, 0, 0, 0, // 6: END WITH SPACE
0, 6, 0, 7, 0, 5, // 7: DOT AND DIGIT
0, 6, 0, 8, 0, 0, // 8: END WITH SPACE OR DIGIT
};
int state = 0;
while(*s){
Action act = INVALID;
if (*s == ' ')
act = SPACE;
else if (*s == '+' || *s == '-')
act = SIGN;
else if (isdigit(*s))
act = DIGIT;
else if (*s == '.')
act = DOT;
else if (*s == 'e')
act = EXPONENT;
state = transTable[state][act];
if (state == 0) return false; // dead state: reject immediately
s++;
}
bool validStates[]={0,0,0,1,0,0,1,1,1}; // states 3, 6, 7, 8 accept
return validStates[state];
}
};<file_sep>/*
Given a matrix of m x n elements (m rows, n columns), return all elements of the matrix in spiral order.
For example,
Given the following matrix:
[
[ 1, 2, 3 ],
[ 4, 5, 6 ],
[ 7, 8, 9 ]
]
You should return [1,2,3,6,9,8,7,4,5].
*/
class Solution {
public:
    // Spiral-order traversal of an m x n matrix.
    // BUG FIX: the original marked visited cells by writing 0 into the
    // matrix, which corrupts the walk (and can index out of bounds)
    // whenever a real element is 0, and also clobbered the caller's data.
    // This version walks the four sides of each rectangular layer instead
    // and leaves the matrix unmodified.
    vector<int> spiralOrder(vector<vector<int> > &matrix) {
        vector<int> v;
        if (matrix.size() == 0 || matrix[0].size() == 0)
            return v;
        int top = 0, bottom = (int)matrix.size() - 1;
        int left = 0, right = (int)matrix[0].size() - 1;
        while (top <= bottom && left <= right) {
            for (int j = left; j <= right; ++j) v.push_back(matrix[top][j]);
            ++top;
            for (int i = top; i <= bottom; ++i) v.push_back(matrix[i][right]);
            --right;
            if (top <= bottom) {   // guard: a single remaining row was already emitted
                for (int j = right; j >= left; --j) v.push_back(matrix[bottom][j]);
                --bottom;
            }
            if (left <= right) {   // guard: a single remaining column was already emitted
                for (int i = bottom; i >= top; --i) v.push_back(matrix[i][left]);
                ++left;
            }
        }
        return v;
    }
};<file_sep>/*
Implement regular expression matching with support for '.' and '*'.
'.' Matches any single character.
'*' Matches zero or more of the preceding element.
The matching should cover the entire input string (not partial).
The function prototype should be:
bool isMatch(const char *s, const char *p)
Some examples:
isMatch("aa","a") → false
isMatch("aa","aa") → true
isMatch("aaa","aa") → false
isMatch("aa", "a*") → true
isMatch("aa", ".*") → true
isMatch("ab", ".*") → true
isMatch("aab", "c*a*b") → true
*/
class Solution {
public:
    // Regular-expression match supporting '.' (any single char) and '*'
    // (zero or more of the preceding element); must cover the whole input.
    bool isMatch(const char *s, const char *p) {
        assert(s && p);
        if (*p == '\0') return *s == '\0';
        if (*(p+1) != '*') {
            assert(*p != '*');   // a valid pattern never starts with '*'
            return ((*p == *s) || (*p == '.' && *s != '\0')) && isMatch(s+1, p+1);
        }
        // "*p followed by '*'": try matching 0, 1, 2, ... occurrences of *p.
        while ((*p == *s) || (*p == '.' && *s != '\0')) {
            if (isMatch(s, p+2)) return true;
            s++;
        }
        return isMatch(s, p+2);
    }
    // std::string-based variant with the same semantics.
    bool isMatch1(const char *s, const char *p) {
        string str(s);
        string pattern(p);
        return isMatch1(str, pattern);   // BUG FIX: the result was computed but never returned (UB)
    }
    bool isMatch1(string s, string p){
        if( p=="" )
            return s=="";
        // p[1] is safe even for a one-char pattern: std::string::operator[]
        // returns '\0' at index size() (C++11).
        if(p[1]!='*'){
            if(s[0]==p[0]|| (p[0]=='.' && s!="") )
                return isMatch1(s.substr(1),p.substr(1));
            return false;
        }
        else{
            if( isMatch1(s,p.substr(2)) )   // zero occurrences of p[0]
                return true;
            for(int i = 0; i<s.size(); ++i){
                if(s[i]==p[0]|| p[0]=='.'){
                    if(isMatch1(s.substr(i+1),p.substr(2)))
                        return true;
                }
                else{
                    break;
                }
            }
            return false;
        }
    }
};<file_sep>/*
Given two words (start and end), and a dictionary, find the length of shortest transformation sequence from start to end, such that:
Only one letter can be changed at a time
Each intermediate word must exist in the dictionary
For example,
Given:
start = "hit"
end = "cog"
dict = ["hot","dot","dog","lot","log"]
As one shortest transformation is "hit" -> "hot" -> "dot" -> "dog" -> "cog",
return its length 5.
Note:
Return 0 if there is no such transformation sequence.
All words have the same length.
All words contain only lowercase alphabetic characters.
*/
class Solution {
public:
// One-letter mutations of s present in dict. NOTE: every found word is
// ERASED from dict, which makes dict double as the visited set for the
// BFS below (each word can be enqueued at most once).
vector<string > getNeighbors(const string& s, unordered_set<string> &dict){
vector<string > neighbors;
string start(s);
for(int i = 0; i < start.size(); ++i){
for(int j = 'a' ; j <= 'z' ; ++j){
start[i] = j;
if( dict.count(start) > 0){
neighbors.push_back(start);
dict.erase(start);
}
}
start = s; // restore before mutating the next position
}
return neighbors;
}
//BFS
// Length of the shortest start->end ladder (counting both endpoints),
// or 0 if none exists. dict is consumed as words are discovered.
int ladderLength(string start, string end, unordered_set<string> &dict) {
queue<pair<string,int> > q; // (word, ladder length so far)
q.push(pair<string,int>(start,1));
dict.erase(start);
while(q.empty()==false){
pair<string,int> pair = q.front();
q.pop();
if(pair.first==end){
return pair.second;
}
else{
vector<string > neighbors = getNeighbors( pair.first, dict);
for(int i = 0; i < neighbors.size(); ++i){
q.push( std::pair<string,int>(neighbors[i],pair.second+1) );
}
}
}
//not found
return 0;
}
//used a different hash to track nodes which were visited before
/*
class Solution {
public:
int ladderLength(string start, string end, unordered_set<string> &dict) {
if(start==end)
return 0;
typedef pair<string,int> Pair;
unordered_set<string> used;
queue<Pair> q;
q.push(make_pair(start,1));
used.insert(start);
while(!q.empty()){
Pair p = q.front();
q.pop();
if(p.first==end) return p.second;
vector<string> neighbors = getNeighbors(p.first,dict);
for(int i = 0; i < neighbors.size(); ++i){
if(used.count(neighbors[i])==0){
used.insert(neighbors[i]);
q.push(pair<string,int>(neighbors[i],p.second+1));
}
}
}
return 0;
}
vector<string > getNeighbors(const string& s, unordered_set<string> &dict){
vector<string > neighbors;
string start(s);
for(int i = 0; i < start.size(); ++i){
for(int j = 'a' ; j <= 'z' ; ++j){
start[i] = j;
if( dict.count(start) > 0){
neighbors.push_back(start);
}
}
start = s;
}
return neighbors;
}
};
*/
};<file_sep>/*
Given a set of candidate numbers (C) and a target number (T), find all unique combinations in C where the candidate numbers sums to T.
The same repeated number may be chosen from C unlimited number of times.
Note:
All numbers (including target) will be positive integers.
Elements in a combination (a1, a2, … , ak) must be in non-descending order. (ie, a1 ≤ a2 ≤ … ≤ ak).
The solution set must not contain duplicate combinations.
For example, given candidate set 2,3,6,7 and target 7,
A solution set is:
[7]
[2, 2, 3]
*/
class Solution {
public:
    vector<vector<int> > vv;   // accumulates results for the current query
    // Backtracking helper: at `index`, either take candidates[index] again
    // (elements may be reused any number of times) or skip past it.
    void combinationSum(vector<int> &candidates, int target, int index, int sum, vector<int>& v){
        if(sum==target){
            vv.push_back(v);
            return;
        }
        // Candidates are sorted positives, so overshooting cannot recover.
        if(sum>target||index>=candidates.size()){
            return;
        }
        v.push_back(candidates[index]);
        combinationSum(candidates,target,index,sum+candidates[index],v);
        v.pop_back();
        combinationSum(candidates,target,index+1,sum,v);
    }
    // All unique non-descending combinations of candidates summing to target.
    vector<vector<int> > combinationSum(vector<int> &candidates, int target) {
        vv.clear();   // BUG FIX: stale results from a previous call leaked into the answer
        sort(candidates.begin(),candidates.end());
        vector<int> v;
        combinationSum(candidates,target,0,0,v);
        return vv;
    }
};<file_sep>/*
Reverse a linked list from position m to n. Do it in-place and in one-pass.
For example:
Given 1->2->3->4->5->NULL, m = 2 and n = 4,
return 1->4->3->2->5->NULL.
Note:
Given m, n satisfy the following condition:
1 ≤ m ≤ n ≤ length of list.
*/
/**
* Definition for singly-linked list.
* struct ListNode {
* int val;
* ListNode *next;
* ListNode(int x) : val(x), next(NULL) {}
* };
*/
class Solution {
public:
// Recursively reverse an entire list; returns the new head.
ListNode* reverse(ListNode* head){
if(!head||!(head->next))
return head;
ListNode* newHead = reverse(head->next);
head->next->next = head;
head->next = NULL;
return newHead;
}
// Reverse nodes m..n (1-based, 1 <= m <= n <= length) in one pass:
// locate the node before m and node n, detach the sublist, reverse it,
// then splice it back between its old neighbors.
ListNode *reverseBetween(ListNode *head, int m, int n) {
//assume n and m are within the range and n >= m
if(m==n)
return head;
//dummy head make life easier for case that m==1
ListNode dummy(0);
ListNode* newHead = &dummy;
newHead->next = head;
ListNode* preM = newHead; // will stop just before position m
ListNode* nodeN = newHead; // will land on node n
int index = 1 ;
while(index<=n){
if(index<m){
preM = preM->next;
}
nodeN = nodeN->next;
++index;
}
ListNode* nodeM = preM->next;
preM->next = NULL;
ListNode* postN = nodeN->next; // remainder after position n
nodeN->next = NULL;
preM->next = reverse(nodeM); // reversed sublist: new front is old node n
nodeM->next = postN; // old node m is now the sublist's tail
return newHead->next;
}
};<file_sep>/*
Given a set of distinct integers, S, return all possible subsets.
Note:
Elements in a subset must be in non-descending order.
The solution set must not contain duplicate subsets.
For example,
If S = [1,2,3], a solution is:
[
[3],
[1],
[2],
[1,2,3],
[1,3],
[2,3],
[1,2],
[]
]
*/
class Solution {
public:
    // DFS that records every prefix it reaches: each call emits the current
    // subset, then tries extending it with each later element of S.
    void subsets(vector<int> &S, int index, vector<int>& v, vector<vector<int> >& vv) {
        vv.push_back(v);   // every prefix reached is itself a subset
        for (int i = index; i < (int)S.size(); ++i) {
            v.push_back(S[i]);
            subsets(S, i + 1, v, vv);
            v.pop_back();
        }
    }
    // All subsets of the distinct set S, each in non-descending order.
    vector<vector<int> > subsets(vector<int> &S) {
        sort(S.begin(), S.end());
        vector<vector<int> > vv;
        vector<int> v;
        subsets(S, 0, v, vv);
        return vv;
    }
/*
void subsets(vector<int> &S, int index, vector<int>& v, vector<vector<int> >& vv) {
if(index==S.size())
{
vv.push_back(v);
return;
}
v.push_back(S[index]);
subsets(S,index+1,v,vv);
v.pop_back();
subsets(S,index+1,v,vv);
}
*/
};<file_sep>/*
Given an array S of n integers, are there elements a, b, c in S such that a + b + c = 0? Find all unique triplets in the array which gives the sum of zero.
Note:
Elements in a triplet (a,b,c) must be in non-descending order. (ie, a ≤ b ≤ c)
The solution set must not contain duplicate triplets.
For example, given array S = {-1 0 1 2 -1 -4},
A solution set is:
(-1, 0, 1)
(-1, -1, 2)
*/
class Solution {
public:
    // All unique triplets summing to zero: sort, anchor the smallest
    // element, then close in with two pointers; duplicate anchors and
    // duplicate pair members are skipped to keep results unique.
    vector<vector<int> > threeSum(vector<int> &num) {
        vector<vector<int> > triplets;
        const int n = num.size();
        if (n < 3) return triplets;
        sort(num.begin(), num.end());
        for (int a = 0; a + 2 < n; ++a) {
            if (a > 0 && num[a] == num[a - 1]) continue;   // duplicate anchor
            int lo = a + 1, hi = n - 1;
            while (lo < hi) {
                const int total = num[a] + num[lo] + num[hi];
                if (total < 0) {
                    ++lo;
                } else if (total > 0) {
                    --hi;
                } else {
                    vector<int> t;
                    t.push_back(num[a]);
                    t.push_back(num[lo]);
                    t.push_back(num[hi]);
                    triplets.push_back(t);
                    while (++lo < hi && num[lo] == num[lo - 1]) {}   // skip duplicates
                    while (lo < --hi && num[hi] == num[hi + 1]) {}
                }
            }
        }
        return triplets;
    }
};<file_sep>/*
Given a binary tree, return the inorder traversal of its nodes' values.
For example:
Given binary tree {1,#,2,3},
1
\
2
/
3
return [1,3,2].
Note: Recursive solution is trivial, could you do it iteratively?
confused what "{1,#,2,3}" means? > read more on how binary tree is serialized on OJ.
OJ's Binary Tree Serialization:
The serialization of a binary tree follows a level order traversal, where '#' signifies a path terminator where no node exists below.
Here's an example:
1
/ \
2 3
/
4
\
5
The above binary tree is serialized as "{1,2,3,#,#,4,#,#,5}".
*/
/**
* Definition for binary tree
* struct TreeNode {
* int val;
* TreeNode *left;
* TreeNode *right;
* TreeNode(int x) : val(x), left(NULL), right(NULL) {}
* };
*/
class Solution {
public:
//iterative
vector<int> inorderTraversal(TreeNode *root) {
vector<int> v;
if(root==NULL)
return v;
stack<TreeNode*> s;
TreeNode * curNode = root;
while(curNode||!s.empty()){
if(curNode){
s.push(curNode);
curNode=curNode->left;
}
else{
TreeNode* n= s.top();
s.pop();
v.push_back(n->val);
curNode=n->right;
}
}
return v;
}
};
<file_sep>/*
Implement regular expression matching with support for '.' and '*'.
'.' Matches any single character.
'*' Matches zero or more of the preceding element.
The matching should cover the entire input string (not partial).
The function prototype should be:
bool isMatch(const char *s, const char *p)
Some examples:
isMatch("aa","a") → false
isMatch("aa","aa") → true
isMatch("aaa","aa") → false
isMatch("aa", "a*") → true
isMatch("aa", ".*") → true
isMatch("ab", ".*") → true
isMatch("aab", "c*a*b") → true
*/
/*
http://blog.csdn.net/lifajun90/article/details/10582733
http://blog.csdn.net/a83610312/article/details/9750655
*/
class Solution {
public:
// Regex match with '.' (any single char) and '*' (zero or more of the
// preceding element); must cover the entire input. Delegates to the DP.
bool isMatch (const char *s, const char *p) {
return isMatch2(s,p);
}
//recursion
bool isMatch1 (const char *s, const char *p) {
assert(s && p);
if (*p == '\0') return *s == '\0';
if (*(p+1) != '*') {
assert(*p != '*');
return ((*p == *s) || (*p == '.' && *s != '\0')) && isMatch(s+1, p+1);
}
// next char is '*': try consuming 0, 1, 2, ... occurrences of *p
while ((*p == *s) || (*p == '.' && *s != '\0')) {
if (isMatch(s, p+2)) return true;
s++;
}
return isMatch(s, p+2);
}
//dp
// dp[i][j] == "s[0..i) matches p[0..j)".
// NOTE(review): a pattern beginning with '*' makes the row-0 seed at i==1
// read dp[0][-1] (out of bounds); callers presumably never pass such
// patterns -- confirm.
bool isMatch2 (const char *s, const char *p) {
assert(s && p);
int ls = strlen(s);
int lp = strlen(p);
vector<vector<bool> > dp(ls+1,vector<bool>(lp+1,false) );
dp[0][0]=true;
// "x*" can match the empty string, so propagate along row 0.
for(int i = 1; i <=lp; ++i){
if(p[i-1]=='*') dp[0][i]=dp[0][i-2];
}
for(int i = 1; i <= ls; ++i){
for(int j = 1; j <= lp; ++j){
if(p[j-1]!='*'){
dp[i][j]=dp[i-1][j-1]&&(s[i-1]==p[j-1]||p[j-1]=='.');
}
//current char in pattern is *
else{
// zero copies (j-2), one copy (j-1), or extend a previous copy (i-1).
dp[i][j]=dp[i][j-2] || dp[i][j-1] || (dp[i-1][j]&&(s[i-1]==p[j-2]||p[j-2]=='.')) ;
}
}
}
return dp[ls][lp];
}
};<file_sep>/*
Follow up for "Unique Paths":
Now consider if some obstacles are added to the grids. How many unique paths would there be?
An obstacle and empty space is marked as 1 and 0 respectively in the grid.
For example,
There is one obstacle in the middle of a 3x3 grid as illustrated below.
[
[0,0,0],
[0,1,0],
[0,0,0]
]
The total number of unique paths is 2.
Note: m and n will be at most 100.
*/
class Solution {
public:
    // Count monotone (right/down) lattice paths from the top-left to the
    // bottom-right corner, where cells marked 1 are impassable obstacles.
    int uniquePathsWithObstacles(vector<vector<int> > &obstacleGrid) {
        if (obstacleGrid.empty() || obstacleGrid[0].empty())
            return 0;
        const int rows = obstacleGrid.size();
        const int cols = obstacleGrid[0].size();
        // A blocked start or goal makes every path impossible.
        if (obstacleGrid[0][0] != 0 || obstacleGrid[rows - 1][cols - 1] != 0)
            return 0;
        vector<vector<int> > paths(rows, vector<int>(cols, 0));
        paths[0][0] = 1;
        for (int r = 0; r < rows; ++r) {
            for (int c = 0; c < cols; ++c) {
                if (obstacleGrid[r][c] != 0) {   // obstacle: no path ends here
                    paths[r][c] = 0;
                    continue;
                }
                if (r > 0) paths[r][c] += paths[r - 1][c];
                if (c > 0) paths[r][c] += paths[r][c - 1];
            }
        }
        return paths[rows - 1][cols - 1];
    }
};<file_sep>/*
Merge k sorted linked lists and return it as one sorted list. Analyze and describe its complexity.
*/
/**
* Definition for singly-linked list.
* struct ListNode {
* int val;
* ListNode *next;
* ListNode(int x) : val(x), next(NULL) {}
* };
*/
class Solution {
public:
    // Orders the min-heap so that the smallest head sits on top.
    struct MyComp{
        bool operator()(ListNode* n1, ListNode* n2){
            return n1->val > n2->val;
        }
    };
    // k-way merge via a min-heap of current list heads: O(N log k) where
    // N is the total node count and k the number of lists.
    ListNode *mergeKLists(vector<ListNode *> &lists) {
        if (lists.empty())
            return NULL;
        if (lists.size() == 1)
            return lists[0];
        priority_queue<ListNode*, deque<ListNode*>, MyComp> pq;
        for (size_t i = 0; i < lists.size(); ++i)
            if (lists[i] != NULL)
                pq.push(lists[i]);
        ListNode *head = NULL, *tail = NULL;
        while (!pq.empty()) {
            ListNode *node = pq.top();
            pq.pop();
            if (node->next != NULL)
                pq.push(node->next);   // refill from the list we took from
            if (head == NULL) {
                head = tail = node;
            } else {
                tail->next = node;
                tail = node;
            }
        }
        return head;
    }
};<file_sep>/*
Implement wildcard pattern matching with support for '?' and '*'.
'?' Matches any single character.
'*' Matches any sequence of characters (including the empty sequence).
The matching should cover the entire input string (not partial).
The function prototype should be:
bool isMatch(const char *s, const char *p)
Some examples:
isMatch("aa","a") → false
isMatch("aa","aa") → true
isMatch("aaa","aa") → false
isMatch("aa", "*") → true
isMatch("aa", "a*") → true
isMatch("ab", "?*") → true
isMatch("aab", "c*a*b") → false
*/
class Solution {
public:
/*
Analysis:
For each element in s
If *s==*p or *p == ? which means this is a match, then goes to next element s++ p++.
If p=='*', this is also a match, but one or many chars may be available, so let us save this *'s position and the matched s position.
If not match, then we check if there is a * previously showed up,
if there is no *, return false;
if there is an *, we set current p to the next element of *, and set current s to the next saved s position
abed
?b*d**
a=?, go on, b=b, go on,
e=*, save * position star=3, save s position ss = 3, p++
e!=d, check if there was a *, yes, ss++, s=ss; p=star+1
d=d, go on, meet the end.
check the rest element in p, if all are *, true, else false;
*/
//very clever
// Greedy two-pointer wildcard match ('?': one char, '*': any sequence).
// Remembers the most recent '*' (star) and the input position it covered
// up to (ss); on a mismatch that '*' absorbs one more input char and the
// scan retries. O(n*m) worst case, O(1) space.
bool isMatch(const char *s, const char *p) {
const char* star=NULL;
const char* ss=s;
while (*s){
if ((*p=='?')||(*p==*s)){s++;p++;continue;}
if (*p=='*'){star=p++; ss=s;continue;}
if (star){ p = star+1; s=++ss;continue;} // backtrack: '*' eats one more char
return false;
}
while (*p=='*'){p++;} // trailing '*'s may match the empty suffix
return !*p;
}
//Timeout, so many recursion even if optimized
/*
http://blog.csdn.net/a83610312/article/details/9750655
class Solution {
public:
bool isMatch(const char *s, const char *p) {
return isMatch1(s,p);
}
//greedy
bool isMatch1(const char *s, const char *p) {
assert(s && p);
const char* star=NULL;
const char* saveS=NULL;
while(*s){
if(*p==*s||*p=='?'){ ++s;++p; }
else if( *p=='*'){ star=p;saveS=s;++p;}
else if(star){ p=star;++p;s=saveS;++saveS; }
else{ return false;}
}
while(*p=='*') ++p;
return *p==0;
}
//dp, exceed memory limit
bool isMatch2 (const char *s, const char *p) {
assert(s && p);
int ls = strlen(s);
int lp = strlen(p);
vector<vector<bool> > dp(ls+1,vector<bool>(lp+1,false) );
dp[0][0]=true;
for(int i = 1; i <=lp; ++i){
if(p[i-1]=='*') dp[0][i]=dp[0][i-1];
}
for(int i = 1; i <= ls; ++i){
for(int j = 1; j <= lp; ++j){
if(p[j-1]!='*'){
dp[i][j]=dp[i-1][j-1]&&(s[i-1]==p[j-1]||p[j-1]=='?');
}
//current char in pattern is *
else{
dp[i][j]=dp[i][j-1] || dp[i-1][j-1] || dp[i-1][j] ;
}
}
}
return dp[ls][lp];
}
//dp, only use two rows
bool isMatch3 (const char *s, const char *p) {
assert(s && p);
int ls = strlen(s);
int lp = strlen(p);
vector<vector<bool> > dp(2,vector<bool>(lp+1,false) );
dp[0][0]=true;
for(int i = 1; i <=lp; ++i){
if(p[i-1]=='*') dp[0][i]=dp[0][i-1];
}
for(int i = 1; i <= ls; ++i){
int curRow=i%2;
int preRow=(i+1)%2;
for(int j = 1; j <= lp; ++j){
if(p[j-1]!='*'){
dp[curRow][j]=dp[preRow][j-1]&&(s[i-1]==p[j-1]||p[j-1]=='?');
}
//current char in pattern is *
else{
dp[curRow][j]=dp[curRow][j-1] || dp[preRow][j-1] || dp[preRow][j] ;
}
}
}
return dp[ls%2][lp];
}
};
*/
};<file_sep>/*
The string "PAYPALISHIRING" is written in a zigzag pattern on a given number of rows like this: (you may want to display this pattern in a fixed font for better legibility)
P A H N
A P L S I I G
Y I R
And then read line by line: "PAHNAPLSIIGYIR"
Write the code that will take a string and make this conversion given a number of rows:
string convert(string text, int nRows);
convert("PAYPALISHIRING", 3) should return "PAHNAPLSIIGYIR"
*/
class Solution {
public:
    // Zigzag conversion read row by row. The top and bottom rows repeat
    // with period 2*(nRows-1); each middle row contributes one extra
    // character per period, offset by period - 2*row (the "up" diagonal).
    string convert(string s, int nRows)
    {
        if (nRows < 2) return s;
        const int n = s.size();
        const int period = 2 * (nRows - 1);
        string out;
        for (int i = 0; i < n; i += period)              // top row
            out.push_back(s[i]);
        for (int row = 1; row + 1 < nRows; ++row) {      // middle rows
            const int offset = period - 2 * row;
            for (int j = row; j < n; j += period) {
                out.push_back(s[j]);
                if (j + offset < n)
                    out.push_back(s[j + offset]);        // diagonal partner
            }
        }
        for (int i = nRows - 1; i < n; i += period)      // bottom row
            out.push_back(s[i]);
        return out;
    }
};<file_sep>/*
You are given two linked lists representing two non-negative numbers. The digits are stored in reverse order and each of their nodes contain a single digit. Add the two numbers and return it as a linked list.
Input: (2 -> 4 -> 3) + (5 -> 6 -> 4)
Output: 7 -> 0 -> 8
*/
/**
* Definition for singly-linked list.
* struct ListNode {
* int val;
* ListNode *next;
* ListNode(int x) : val(x), next(NULL) {}
* };
*/
class Solution {
public:
    // Sum two non-negative numbers stored as little-endian digit lists;
    // returns the sum in the same representation.
    ListNode *addTwoNumbers(ListNode *l1, ListNode *l2) {
        ListNode dummy(0);          // dummy head avoids special-casing the first node
        ListNode *tail = &dummy;
        int carry = 0;
        while (l1 != NULL || l2 != NULL || carry != 0) {
            int total = carry;
            if (l1 != NULL) { total += l1->val; l1 = l1->next; }
            if (l2 != NULL) { total += l2->val; l2 = l2->next; }
            carry = total / 10;
            tail->next = new ListNode(total % 10);
            tail = tail->next;
        }
        return dummy.next;
    }
};<file_sep>/*
Given numRows, generate the first numRows of Pascal's triangle.
For example, given numRows = 5,
Return
[
[1],
[1,1],
[1,2,1],
[1,3,3,1],
[1,4,6,4,1]
]
*/
class Solution {
public:
    // Builds the first numRows rows of Pascal's triangle: every edge cell
    // is 1 and every interior cell is the sum of the two cells above it.
    vector<vector<int> > generate(int numRows) {
        vector<vector<int> > triangle;
        for (int row = 0; row < numRows; ++row) {
            vector<int> line(row + 1, 1);
            for (int col = 1; col < row; ++col)
                line[col] = triangle[row-1][col-1] + triangle[row-1][col];
            triangle.push_back(line);
        }
        return triangle;
    }
};<file_sep>/*
Sort a linked list in O(n log n) time using constant space complexity.
*/
/**
* Definition for singly-linked list.
* struct ListNode {
* int val;
* ListNode *next;
* ListNode(int x) : val(x), next(NULL) {}
* };
*/
/**
* Definition for singly-linked list.
* struct ListNode {
* int val;
* ListNode *next;
* ListNode(int x) : val(x), next(NULL) {}
* };
*/
class Solution {
public:
    // Entry point: sorts a singly linked list in O(n log n) time using
    // merge sort; extra space is O(log n) recursion plus one dummy node
    // per merge.
    ListNode *sortList(ListNode *head) {
        return mergetSort(head);
    }
    // Recursive merge sort: split the list in half, sort each half, merge.
    ListNode *mergetSort(ListNode * head) {
        if (head == NULL || head->next == NULL) return head;
        ListNode * frontHead = head, * backHead = split(head);
        frontHead = mergetSort(frontHead);
        backHead = mergetSort(backHead);
        return merge(frontHead, backHead);
    }
    // Cuts the list after its midpoint (slow/fast pointer walk) and
    // returns the head of the back half; the front half is terminated.
    ListNode * split(ListNode * head) {
        ListNode * fastNode = head, * slowNode = head;
        while (fastNode->next != NULL && fastNode->next->next != NULL) fastNode = fastNode->next->next, slowNode = slowNode->next;
        head = slowNode->next;   // back half starts after the slow pointer
        slowNode->next = NULL;   // terminate the front half
        return head;
    }
    // Merges two sorted lists behind a temporary dummy head; `<=` keeps the
    // merge stable. The dummy is freed before returning.
    ListNode * merge(ListNode * frontHead, ListNode * backHead) {
        ListNode * head = new ListNode(-1), * curNode = head;
        while (frontHead != NULL || backHead != NULL) {
            // take from the front list while it exists and its head is <=
            if (backHead == NULL || (frontHead != NULL && frontHead->val <= backHead->val)) curNode->next = frontHead, frontHead = frontHead->next;
            else curNode->next = backHead, backHead = backHead->next;
            curNode = curNode->next;
        }
        return deleteNode(head);
    }
    // Frees curNode and returns its successor (used to drop dummy heads).
    ListNode * deleteNode(ListNode * curNode) {
        ListNode * toDel = curNode;
        curNode = curNode->next;
        delete toDel;
        return curNode;
    }
};<file_sep>/*
Given a string s1, we may represent it as a binary tree by partitioning it to two non-empty substrings recursively.
Below is one possible representation of s1 = "great":
great
/ \
gr eat
/ \ / \
g r e at
/ \
a t
To scramble the string, we may choose any non-leaf node and swap its two children.
For example, if we choose the node "gr" and swap its two children, it produces a scrambled string "rgeat".
rgeat
/ \
rg eat
/ \ / \
r g e at
/ \
a t
We say that "rgeat" is a scrambled string of "great".
Similarly, if we continue to swap the children of nodes "eat" and "at", it produces a scrambled string "rgtae".
rgtae
/ \
rg tae
/ \ / \
r g ta e
/ \
t a
We say that "rgtae" is a scrambled string of "great".
Given two strings s1 and s2 of the same length, determine if s2 is a scrambled string of s1.
*/
class Solution {
public:
    // Recursive check with pruning: s2 is a scramble of s1 iff for some
    // split point the halves match either straight (left/left and
    // right/right) or swapped (s1's left vs s2's tail of the same length).
    bool isScramble(string s1, string s2) {
        if(s1==s2)
            return true;
        if(s1.size()!=s2.size())
            return false;
        //check chars: scrambles are anagrams, so differing histograms prune early
        char arr[256]={0};
        for(int i = 0; i < s1.size(); ++i){
            arr[s1[i]]++;
            arr[s2[i]]--;
        }
        for(int i = 0; i < 256; ++i){
            if(arr[i]!=0)
                return false;
        }
        // try every split point i in 1..len-1
        for(int i = 1; i < s1.size(); ++i){
            string leftS1 = s1.substr(0,i);
            string rightS1 = s1.substr(i);
            string leftS2 = s2.substr(0,i);
            string rightS2 = s2.substr(i);
            // "children swapped" case: s1's left part of length i pairs
            // with s2's last i characters
            string leftS2reverse = s2.substr(s1.size()-i);
            string rightS2reverse = s2.substr(0,s1.size()-i);
            if(isScramble(leftS1,leftS2)&&isScramble(rightS1,rightS2)){
                return true;
            }
            if(isScramble(leftS1,leftS2reverse)&&isScramble(rightS1,rightS2reverse)){
                return true;
            }
        }
        return false;
    }
    // solution 1: dp. 3-dimensional dp, bottom-up over substring length k
    /*
    'dp[k][i][j] == true' means string s1(start from i, length k) is a scrambled string of
    string s2(start from j, length k).
    */
    bool isScramble_1(string s1, string s2) {
        if(s1.size() != s2.size()) return false;
        int N = s1.size();
        bool dp[N+1][N][N];
        for (int k = 1; k <= N; ++k)
            for (int i = 0; i <= N-k; ++i)
                for (int j = 0; j <= N-k; ++j)
                {
                    dp[k][i][j] = false;
                    if (k == 1)
                        dp[1][i][j] = (s1[i] == s2[j]);
                    // a pair scrambles iff some split p works straight or swapped
                    for (int p = 1; p < k && !dp[k][i][j]; ++p)
                        if (dp[p][i][j] && dp[k-p][i+p][j+p] || dp[p][i][j+k-p] && dp[k-p][i+p][j])
                            dp[k][i][j] = true;
                }
        return dp[N][0][0];
    }
};<file_sep>/*
Given an array of non-negative integers, you are initially positioned at the first index of the array.
Each element in the array represents your maximum jump length at that position.
Determine if you are able to reach the last index.
For example:
A = [2,3,1,1,4], return true.
A = [3,2,1,0,4], return false.
*/
class Solution {
public:
    unordered_map<int,bool> cache;   // memo for the recursive variant: position -> reachable?
    // Recursive + memo: from curPos a jump may advance 1..v[curPos] steps.
    // FIX: the loop previously ran i < len, which silently dropped the
    // full-length jump and produced wrong answers (e.g. [2,3,1,1,4]).
    bool canJump(const vector<int>& v, int curPos){
        if(curPos >= (int)v.size()-1)
            return true;
        if(cache.count(curPos))
            return cache[curPos];
        int len = v[curPos];
        for(int i = 1; i <= len; ++i){
            if(canJump(v, curPos+i)){
                cache[curPos]=true;
                return true;
            }
        }
        cache[curPos]=false;
        return false;
    }
    // Entry point: true when the last index is reachable from index 0.
    bool canJump(int A[], int n) {
        if(n <= 1) return true;   // empty or single element: already there
        vector<int> v(A,A+n);
        //return canJump(v,0);
        //return canJump2(v);
        return canJump3(v);
    }
    // Iterative DP: reach[i] == true once index i is proven reachable.
    // FIX: the inner loop previously ran j < len (same off-by-one as above)
    // and wrote reach[i+j] without a bounds check, overrunning the vector
    // when a jump passes the last index.
    bool canJump2(const vector<int>& v){
        vector<bool> reach(v.size(),false);
        reach[0]=true;
        for(int i = 0; i < (int)v.size(); ++i){
            if(reach[i]){
                int len = v[i];
                for(int j = 1; j <= len; ++j){
                    if(i+j >= (int)v.size()-1)
                        return true;   // landed on (or past) the last index
                    reach[i+j]=true;
                }
            }
        }
        return reach[v.size()-1];
    }
    // Greedy O(N): `end` is the farthest index reachable so far; walk every
    // reachable position and extend it.
    bool canJump3(const vector<int>& v){
        int start = 0, end = 0;
        while (start <= end) {
            end = max(end, start+v[start]);
            if (end >= (int)v.size()-1) return true;
            start++;
        }
        return false;
    }
};
<file_sep>/*
Given a string containing just the characters '(', ')', '{', '}', '[' and ']', determine if the input string is valid.
The brackets must close in the correct order, "()" and "()[]{}" are all valid but "(]" and "([)]" are not.
*/
class Solution {
public:
    unordered_map<char,char> hash;   // opener -> matching closer
    // True when every bracket in s is closed by the matching bracket in
    // correct LIFO order; any non-bracket character makes s invalid.
    bool isValid(string s) {
        hash['('] = ')';
        hash['{'] = '}';
        hash['['] = ']';
        if (s.empty())
            return true;
        if (s.size() % 2 != 0)
            return false;   // odd length can never balance
        stack<char> openers;
        for (size_t pos = 0; pos < s.size(); ++pos) {
            const char c = s[pos];
            switch (c) {
                case '(':
                case '{':
                case '[':
                    openers.push(c);
                    break;
                case ')':
                case '}':
                case ']': {
                    if (openers.empty())
                        return false;          // closer with no opener
                    const char top = openers.top();
                    openers.pop();
                    if (hash[top] != c)
                        return false;          // mismatched pair
                    break;
                }
                default:
                    return false;              // unexpected character
            }
        }
        return openers.empty();   // leftovers mean unclosed brackets
    }
};<file_sep>/*
Follow up for N-Queens problem.
Now, instead of outputting board configurations, return the total number of distinct solutions.
*/
class Solution {
public:
    // True when a queen at (row, col) conflicts with none of the queens
    // already placed in columns [0, col). board[j] holds the row of the
    // queen in column j, so only rows and diagonals need checking.
    bool isSafe(int board[], int n, int row, int col) {
        for (int j = 0; j < col; j++)
            if (board[j] == row || abs(board[j] - row) == col - j)
                return false;   // same row or same diagonal
        return true;
    }
    // Backtracks column by column, trying every safe row for `col` and
    // counting complete placements.
    void totalNQueensHelper(int board[], int n, int col, int &count) {
        if (col == n) {
            count++;
            return;   // FIX: was missing -- without it the loop below ran a
                      // pointless (always-failing) search after each solution
        }
        for (int row = 0; row < n; row++) {
            if (isSafe(board, n, row, col)) {
                board[col] = row;
                totalNQueensHelper(board, n, col+1, count);
            }
        }
    }
    // Returns the number of distinct n-queens solutions.
    int totalNQueens(int n) {
        int count = 0;
        vector<int> board(n);   // was `int board[n]`, a non-standard VLA
        totalNQueensHelper(board.data(), n, 0, count);
        return count;
    }
};
Given a string s and a dictionary of words dict, add spaces in s to construct a sentence where each word is a valid dictionary word.
Return all such possible sentences.
For example, given
s = "catsanddog",
dict = ["cat", "cats", "and", "sand", "dog"].
A solution is ["cats and dog", "cat sand dog"].
*/
class Solution {
// FIX: the original wrapped this class in a second `class Solution`,
// which is ill-formed C++ -- a member class may not have the same name as
// its enclosing class. Flattened to a single class.
public:
    /* bottom-up DP.
       dp[i] holds every word length j such that s[i-j, i) is a dictionary
       word AND the prefix s[0, i-j) is itself breakable; dp[0] = {0} seeds
       the recurrence. Sentences are reconstructed from dp afterwards. */
    vector<string> wordBreak(string s, unordered_set<string> &dict) {
        if(s==""||dict.size()==0)
            return vector<string>();
        // dp[i][j] means when string is at size of i , we have a valid word of length j before it
        vector<vector<int> > dp(s.size()+1,vector<int>());
        dp[0].push_back(0); //no valid word before it
        for(int i = 1; i <= s.size(); ++i){ //cur size
            for(int j = i; j >= 1 ; --j){ //pre len
                if( dict.count(s.substr(i-j,j))>0 && dp[i-j].size()>0 ){
                    dp[i].push_back(j); // record word len
                }
            }
        }
        return buildVec(s,s.size(),dp);
    }
    // Reconstructs every sentence for the prefix str[0, len) by peeling off
    // each recorded final word length and recursing on the remainder.
    vector<string> buildVec(const string& str, int len, const vector<vector<int> >& dp){
        vector<string> v;
        for(int i = 0; i < dp[len].size(); ++i){
            int wordLen = dp[len][i];
            if(wordLen==len){
                v.push_back(str);   // the whole prefix is one word
            }
            else{
                vector<string> pres = buildVec(str.substr(0,len-wordLen),len-wordLen,dp);
                for(int j = 0 ; j < pres.size(); ++j){
                    v.push_back(pres[j]+" " + str.substr(len-wordLen) );
                }
            }
        }
        return v;
    }
    /* alternative bottom-up formulation, kept for reference:
    vector<string> wordBreak(string s, unordered_set<string> &dict) {
        int N = s.size();
        vector<vector<int> > dp(N+1, vector<int>()); // dp[i][j] means ends at i, previous valid end at j
        dp[0].push_back(0);
        for (int i = 1; i <= N; i++)
            for (int j = 1; j <= i; j++)
                if (dp[i-j].size() && (dict.find(s.substr(i-j, j)) != dict.end()))
                    dp[i].push_back(i-j);
        return wordBreakHelper(s, dp, N);
    }
    vector<string> wordBreakHelper(string & str, vector<vector<int> > & dp, int i) {
        vector<string> res;
        for (int j = 0; j < (int)dp[i].size(); j++) {
            if (dp[i][j] == 0) {
                res.push_back(str.substr(0, i));
            }
            else {
                string tmp = str.substr(dp[i][j], i-dp[i][j]);
                vector<string> sub = wordBreakHelper(str, dp, dp[i][j]);
                for (int k = 0; k < (int)sub.size(); k++) {
                    res.push_back(sub[k]+" "+tmp);
                }
            }
        }
        return res;
    }
    */
    /* Top-down DP, kept for reference:
    class Solution {
    public:
        unordered_map<string , pair<bool,vector<string> > > hash;
        bool _wordBreak(string s, unordered_set<string> &dict, vector<string>* v){
            if(s=="") return true;
            if(hash.count(s)){
                if(hash[s].first==false) return false;
                else{
                    copy(hash[s].second.begin(),hash[s].second.end(),back_inserter(*v));
                    return true;
                }
            }
            bool ok=false;
            for(int i = 1; i < s.size(); ++i){
                string pre = s.substr(0,i);
                string post = s.substr(i);
                vector<string> subV;
                if(dict.count(pre)&&_wordBreak(post,dict,&subV)){
                    for(int j = 0; j < subV.size(); ++j){
                        v->push_back(pre+" "+subV[j]);
                    }
                    ok=true;
                }
            }
            if(dict.count(s)){ v->push_back(s); ok=true;}
            hash[s] = pair<bool, vector<string> >(ok,*v);
            return ok;
        }
        vector<string> wordBreak(string s, unordered_set<string> &dict) {
            vector<string> v;
            if(s==""){ return v;}
            _wordBreak(s,dict,&v);
            return v;
        }
    };
    */
};<file_sep>/*
Given a collection of numbers that might contain duplicates, return all possible unique permutations.
For example,
[1,1,2] have the following unique permutations:
[1,1,2], [1,2,1], and [2,1,1].
*/
class Solution {
public:
    vector<vector<int> > retVV;   // accumulates every unique permutation
    // Depth-first backtracking: extend `current` with each unused value.
    // A duplicate may only be chosen when its equal left neighbour is
    // already in use; this fixes the relative order of equal values, so
    // each distinct permutation is emitted exactly once.
    void permuteHepler(vector<int> &num ,vector<bool>& used, vector<int>& current) {
        if (current.size() == num.size()) {
            retVV.push_back(current);
            return;
        }
        for (size_t pos = 0; pos < num.size(); ++pos) {
            if (used[pos])
                continue;
            if (pos > 0 && num[pos] == num[pos-1] && !used[pos-1])
                continue;   // duplicate whose predecessor is idle: skip
            used[pos] = true;
            current.push_back(num[pos]);
            permuteHepler(num, used, current);
            current.pop_back();
            used[pos] = false;
        }
    }
    // Returns all unique permutations of num; num is sorted in place first
    // so equal values become adjacent.
    vector<vector<int> > permuteUnique(vector<int> &num) {
        sort(num.begin(), num.end());
        vector<int> current;
        vector<bool> used(num.size(), false);
        permuteHepler(num, used, current);
        return retVV;
    }
};<file_sep>/*
Given a set of non-overlapping intervals, insert a new interval into the intervals (merge if necessary).
You may assume that the intervals were initially sorted according to their start times.
Example 1:
Given intervals [1,3],[6,9], insert and merge [2,5] in as [1,5],[6,9].
Example 2:
Given [1,2],[3,5],[6,7],[8,10],[12,16], insert and merge [4,9] in as [1,2],[3,10],[12,16].
This is because the new interval [4,9] overlaps with [3,5],[6,7],[8,10].
*/
/**
* Definition for an interval.
* struct Interval {
* int start;
* int end;
* Interval() : start(0), end(0) {}
* Interval(int s, int e) : start(s), end(e) {}
* };
*/
class Solution {
public:
    // Inserts newInterval into a list of sorted, non-overlapping intervals,
    // merging every interval that overlaps it. Runs in O(n).
    vector<Interval> insert(vector<Interval> &intervals, Interval newInterval) {
        vector<Interval> newV;
        if(intervals.size()==0){
            newV.push_back(newInterval);
            return newV;
        }
        int i = 0;
        bool used = false;   // set once newInterval (possibly merged) is emitted
        for( ; i < intervals.size(); ++i ){
            Interval& interval = intervals[i];
            if(interval.end < newInterval.start){
                // entirely before the new interval: copy through unchanged
                newV.push_back(interval);
            }
            else if(interval.start > newInterval.end){
                // entirely after: emit newInterval; the rest is copied below
                used = true;
                newV.push_back(newInterval);
                break;
            }
            else{
                // overlap: widen the merged bounds, then absorb every
                // following interval that still touches the merged end
                int newStart = min(interval.start, newInterval.start);
                int newEnd = max(interval.end, newInterval.end);
                int j = i+1;
                while( j < intervals.size() ){
                    Interval& interval = intervals[j];
                    if(newEnd>=interval.start){
                        ++j;
                        newEnd = max(interval.end, newEnd);
                    }
                    else{
                        break;
                    }
                }
                used = true;
                newV.push_back(Interval(newStart,newEnd));
                i=j;   // resume copying from the first non-overlapping interval
                break;
            }
        }
        // copy any intervals remaining after the break
        if(i<intervals.size()){
            copy(intervals.begin()+i,intervals.end(),back_inserter(newV));
        }
        // newInterval lies after every existing interval
        if( used != true){
            newV.push_back(newInterval);
        }
        return newV;
    }
};<file_sep>/*
There are N gas stations along a circular route, where the amount of gas at station i is gas[i].
You have a car with an unlimited gas tank and it costs cost[i] of gas to travel from station i to its next station (i+1). You begin the journey with an empty tank at one of the gas stations.
Return the starting gas station's index if you can travel around the circuit once, otherwise return -1.
Note:
The solution is guaranteed to be unique.
*/
class Solution {
public:
    // Single pass, O(N): run a tank from the current candidate start.
    // When the tank goes dry, no station inside the failed stretch can be
    // the answer, so the candidate restarts just after the failure point.
    // A full circuit exists iff total gas >= total cost.
    int canCompleteCircuit(vector<int> &gas, vector<int> &cost) {
        int gasSum = 0, costSum = 0;   // totals since the candidate start
        int profit = 0;                // overall gas - cost for the circle
        int index = 0;                 // current candidate starting station
        for (size_t i = 0; i < gas.size(); ++i) {
            gasSum += gas[i];
            costSum += cost[i];
            profit += gas[i] - cost[i];
            if (gasSum < costSum) {    // ran dry: restart after station i
                index = (int)i + 1;
                gasSum = 0;
                costSum = 0;
            }
        }
        return profit < 0 ? -1 : index;
    }
    // Brute force, O(N^2): try every start and simulate the whole circle.
    int canCompleteCircuit2(vector<int> &gas, vector<int> &cost) {
        const int N = (int)gas.size();
        for (int start = 0; start < N; ++start) {
            int gasSum = 0;
            int costSum = 0;
            int travelled = 0;
            int station = start;
            for (; travelled != N; ++travelled, ++station) {
                if (station == N)
                    station = 0;   // wrap around the circle
                gasSum += gas[station];
                costSum += cost[station];
                if (gasSum < costSum)
                    break;         // cannot reach the next station
            }
            if (travelled == N)
                return start;
        }
        return -1;
    }
};<file_sep>/*
Given two numbers represented as strings, return multiplication of the numbers as a string.
Note: The numbers can be arbitrarily large and are non-negative
*/
class Solution {
public:
    // digit character -> numeric value
    int ctoi(char c){
        return c-'0';
    }
    // numeric value (0..9) -> digit character
    char itoc(int i){
        return i +'0';
    }
    // Grade-school multiplication over digit strings. The product of an
    // m-digit by an n-digit number has at most m+n digits, so the result
    // buffer is pre-sized and leading zeros are stripped at the end.
    string multiply(string num1, string num2) {
        if (num1 == "0" || num2 == "0")
            return "0";
        string product(num1.size() + num2.size(), '0');
        for (int i = (int)num1.size() - 1; i >= 0; --i) {
            const int d1 = ctoi(num1[i]);
            int carry = 0;
            int j;
            for (j = (int)num2.size() - 1; j >= 0; --j) {
                // accumulate into the digit already at position i+j+1
                const int total = d1 * ctoi(num2[j]) + carry + ctoi(product[i+j+1]);
                product[i+j+1] = itoc(total % 10);
                carry = total / 10;
            }
            // position i is untouched so far, so plain assignment is safe
            if (carry)
                product[i+j+1] = itoc(carry);
        }
        // strip leading zeros; the product is non-zero here, so this stops
        size_t first = 0;
        while (product[first] == '0')
            ++first;
        return product.substr(first);
    }
};<file_sep>/*
Given a binary tree, return the preorder traversal of its nodes' values.
For example:
Given binary tree {1,#,2,3},
1
\
2
/
3
return [1,2,3].
Note: Recursive solution is trivial, could you do it iteratively?
*/
/**
* Definition for binary tree
* struct TreeNode {
* int val;
* TreeNode *left;
* TreeNode *right;
* TreeNode(int x) : val(x), left(NULL), right(NULL) {}
* };
*/
class Solution {
public:
vector<int> preorderTraversal(TreeNode *root) {
vector<int> v;
if(root==NULL)
return v;
stack<TreeNode*> s;
TreeNode * curNode = root;
while(curNode||!s.empty()){
if(curNode){
v.push_back(curNode->val);
if(curNode->right)
s.push(curNode->right);
curNode=curNode->left;
}
else{
curNode= s.top();
s.pop();
}
}
return v;
}
};<file_sep>/*
Given an array of integers, find two numbers such that they add up to a specific target number.
The function twoSum should return indices of the two numbers such that they add up to the target, where index1 must be less than index2. Please note that your returned answers (both index1 and index2) are not zero-based.
You may assume that each input would have exactly one solution.
Input: numbers={2, 7, 11, 15}, target=9
Output: index1=1, index2=2
*/
class Solution {
public:
    // Orders (value, original 1-based index) pairs by value.
    struct MyComp{
        bool operator()(const pair<int,int>& lhs, const pair<int,int>& rhs){
            return lhs.first < rhs.first;
        }
    };
    // Tags each value with its 1-based position, sorts by value, then
    // closes in from both ends: a sum that is too small advances the left
    // pointer, too large retreats the right one. Indices are returned in
    // ascending order.
    vector<int> twoSum(vector<int> &numbers, int target) {
        assert(numbers.size() >= 2);
        vector<pair<int,int> > tagged;
        for (size_t k = 0; k < numbers.size(); ++k)
            tagged.push_back(make_pair(numbers[k], (int)k + 1));
        sort(tagged.begin(), tagged.end(), MyComp());
        vector<int> answer(2, 0);
        size_t lo = 0, hi = tagged.size() - 1;
        while (lo < hi) {
            const int total = tagged[lo].first + tagged[hi].first;
            if (total == target) {
                answer[0] = tagged[lo].second;
                answer[1] = tagged[hi].second;
                break;
            }
            if (total > target)
                --hi;
            else
                ++lo;
        }
        if (answer[0] > answer[1])
            swap(answer[0], answer[1]);
        return answer;
    }
};<file_sep>/*
Given a string s and a dictionary of words dict, determine if s can be segmented into a space-separated sequence of one or more dictionary words.
For example, given
s = "leetcode",
dict = ["leet", "code"].
Return true because "leetcode" can be segmented as "leet code".
*/
class Solution {
public:
    unordered_set<string> bad;   // memo: suffixes proven unbreakable
    // Top-down DP, O(N^2): s is breakable iff some dictionary prefix
    // leaves a breakable remainder. Failed suffixes are cached in `bad`
    // so each distinct suffix is analysed at most once.
    bool wordBreak(string s, unordered_set<string> &dict) {
        if (s.empty())
            return true;
        if (dict.count(s) > 0)
            return true;          // the whole string is a single word
        if (bad.count(s) > 0)
            return false;         // already proven hopeless
        for (size_t cut = 1; cut <= s.size(); ++cut) {
            const string head = s.substr(0, cut);
            const string tail = s.substr(cut);
            if (dict.count(head) > 0 && wordBreak(tail, dict))
                return true;
        }
        bad.insert(s);
        return false;
    }
    //DP bottom-up O(N^2)
    /*
    bool wordBreak(string s, unordered_set<string> &dict) {
        if(s=="")
            return false;
        vector<bool> can(s.size()+1,false);
        can[0]=true;
        for(int i = 1 ; i <= s.size(); ++i){
            for(int j = i; j>=1; --j){
                if(can[j-1]&&dict.count(s.substr(j-1,i-j+1))>0) { can[i]=true;break; }
            }
        }
        return can[s.size()];
    }
    */
};<file_sep>/*
Given a string containing just the characters '(' and ')', find the length of the longest valid (well-formed) parentheses substring.
For "(()", the longest valid parentheses substring is "()", which has length = 2.
Another example is ")()())", where the longest valid parentheses substring is "()()", which has length = 4
*/
class Solution {
public:
    // Length of the longest well-formed parentheses substring of s.
    int longestValidParentheses(string s) {
        return longestValidParentheses2(s);
    }
    // Stack approach: push indices of '('; on ')' pop a match. `last`
    // remembers the most recent unmatched ')', so a valid stretch reaches
    // back either to last+1 or to the '(' still left on the stack.
    int longestValidParentheses1(string & s) {
        int N = s.size(), res = 0, last = -1;
        stack<int> stk;
        for (int i = 0; i < (int)s.size(); i++) {
            if (s[i] == '(') {
                stk.push(i);
                continue;
            }
            if (stk.empty()) {
                last = i; //remember where the latest valid position starts
            }
            else {
                stk.pop();
                res = max(res, stk.empty() ? (i - last) : (i - stk.top()));
            }
        }
        return res;
    }
    // Counting approach, O(1) space: scan left-to-right with '(' as the
    // opener, then right-to-left with ')' as the opener. Each scan catches
    // the balanced stretches the other misses (leftover unmatched openers).
    int longestValidParentheses2(string & s) {
        int N = s.size();
        return max(longestValidParenthesesHelper(s, '(', 0, N, 1), //(((((((((((((((()
                   longestValidParenthesesHelper(s, ')', N - 1, -1, -1)); //())))))))))))))))
    }
    // Walks s from `start` towards `end` (exclusive) in `step` direction.
    // counter_c counts unmatched openers; when it returns to zero the
    // current run of `pair` matches is fully balanced, and when it goes
    // negative the run is broken and both counters reset.
    int longestValidParenthesesHelper(string & s, char c, int start, int end, int step) {
        int res = 0, counter_c = 0, pair = 0; // counts of c, and valid pairs
        for (int i = start; i != end; i += step) {
            if (s[i] == c) counter_c++ ;
            else counter_c--, pair++;
            if (counter_c == 0) res = max(res, pair*2);
            else if (counter_c < 0) counter_c = 0, pair = 0;
        }
        return res;
    }
};<file_sep>/*
Given a 2D board containing 'X' and 'O', capture all regions surrounded by 'X'.
A region is captured by flipping all 'O's into 'X's in that surrounded region .
For example,
X X X X
X O O X
X X O X
X O X X
After running your function, the board should be:
X X X X
X X X X
X X X X
X O X X
*/
class Solution {
public:
    // Captures every 'O' region not connected to the board's border.
    void solve(vector<vector<char>> &board) {
        // boards smaller than 3x3 are all border: nothing can be captured
        if(board.size()<3||board[0].size()<3){
            return;
        }
        solve2(board);
    }
    // BFS from every border 'O', marking reachable cells 'D' (protected).
    // Afterwards remaining 'O's are surrounded -> 'X', and 'D's -> 'O'.
    void solve2(vector<vector<char>> &board)
    {
        if (board.empty()) return;
        int M = board.size();
        int N = board[0].size();
        // seed from the left and right columns
        for (int i = 0; i < M; i++)
        {
            if (board[i][0] == 'O') bfs(board, M, N, i, 0);
            if (board[i][N-1] == 'O') bfs(board, M, N, i, N-1);
        }
        // seed from the top and bottom rows (corners handled above)
        for (int j = 1; j < N-1; j++)
        {
            if (board[0][j] == 'O') bfs(board, M, N, 0, j);
            if (board[M-1][j] == 'O') bfs(board, M, N, M-1, j);
        }
        for (int i = 0; i < M; i++)
            for (int j = 0; j < N; j++)
                if (board[i][j] == 'O') board[i][j] = 'X';
                else if (board[i][j] == 'D') board[i][j] = 'O';
    }
    // Flood-fills the 'O's connected to (i, j) with 'D'. The neighbour
    // guards use i > 1 / j > 1 rather than > 0, so row 0 / column 0 are
    // never reached from inside -- they don't need to be, because solve2
    // seeds every border 'O' directly.
    void bfs(vector<vector<char>> &board, int M, int N, int i, int j)
    {
        queue<pair<int, int>> queue;
        board[i][j] = 'D';   // mark before enqueuing so each cell enqueues once
        queue.push(make_pair(i, j));
        while (!queue.empty())
        {
            i = queue.front().first;
            j = queue.front().second;
            queue.pop();
            if (i > 1 && board[i-1][j] == 'O')
            {
                board[i-1][j] = 'D';
                queue.push(make_pair(i-1, j));
            }
            if (i < M-1 && board[i+1][j] == 'O')
            {
                board[i+1][j] = 'D';
                queue.push(make_pair(i+1, j));
            }
            if (j > 1 && board[i][j-1] == 'O')
            {
                board[i][j-1] = 'D';
                queue.push(make_pair(i, j-1));
            }
            if (j < N-1 && board[i][j+1] == 'O')
            {
                board[i][j+1] = 'D';
                queue.push(make_pair(i, j+1));
            }
        }
    }
    /*
    void solve(vector<vector<char>> &board) {
        if(board.size()<3||board[0].size()<3)
            return;
        for(int i = 0 ; i < board[0].size(); ++i){
            if(board[0][i]=='O') dfs(board,0,i);
            if(board[board.size()-1][i]=='O') dfs(board,board.size()-1,i);
        }
        for(int i = 0 ; i < board.size(); ++i){
            if(board[i][0]=='O') dfs(board,i,0);
            if(board[i][board[0].size()-1]=='O') dfs(board,i,board[0].size()-1);
        }
        for(int i = 0; i < board.size(); ++i){
            for(int j = 0; j < board[0].size(); ++j){
                if(board[i][j]=='B') board[i][j]='O';
                else if(board[i][j]=='O') board[i][j]='X';
            }
        }
        return;
    }
    void dfs(vector<vector<char>> &board, int i, int j){
        if(i<0||i>=board.size()||j<0||j>=board[0].size()||board[i][j]!='O')
            return;
        board[i][j]='B'; //mark it as border
        dfs(board,i-1,j);
        dfs(board,i+1,j);
        dfs(board,i,j-1);
        dfs(board,i,j+1);
    }
    */
};<file_sep>/*
Given a string S, find the longest palindromic substring in S. You may assume that the maximum length of S is 1000, and there exists one unique longest palindromic substring.
*/
class Solution {
public:
    // Entry point: delegates to the O(n^2)-time / O(1)-space expander.
    string longestPalindrome(string s) {
        return longestPalindrome2(s);
    }
    // DP over substring lengths: dp[i][j] == true iff s[i..j] is a
    // palindrome. Takes O(n^2) time and O(n^2) space.
    string longestPalindrome1(string s) {
        if(s==""||s.size()==1)
            return s;
        int maxLen=1;
        int begin = 0;
        vector<vector<bool> > dp(s.size(), vector<bool>(s.size(),false) );
        for(int i = 0; i < s.size(); ++i){
            dp[i][i]=true;
            if(i<s.size()-1 && s[i]==s[i+1]){
                dp[i][i+1]=true;
                // FIX: length-2 palindromes must update the answer too;
                // previously e.g. "cbbd" returned "c" instead of "bb"
                begin=i;
                maxLen=2;
            }
        }
        for(int len = 3; len <= s.size(); ++len){
            for(int i = 0; i + len <= s.size(); ++i){
                int j = i + len - 1;
                // s[i..j] is a palindrome iff its ends match and the
                // interior s[i+1..j-1] already is one
                if(s[i]==s[j] && dp[i+1][j-1]){
                    dp[i][j]=true;
                    begin=i;
                    maxLen=len;
                }
            }
        }
        return s.substr(begin,maxLen);
    }
    // Expand around every center: j == 0 tries the even center between i
    // and i+1, j == 1 the odd center at i. O(n^2) time, O(1) space.
    string longestPalindrome2(string s) {
        int n = s.size();
        if (n == 0) return "";
        int maxi = 0;   // start index of the best palindrome found
        int maxl = 1;   // its length
        for (int i = 0; i < n; i++) {
            for (int j = 0; j < 2; j++) {
                int start = i - j;
                int end = i + 1;
                while (start >= 0 && end < n && s[start] == s[end])
                    start--, end++;
                start++, end--;   // step back inside the last matching pair
                int len = end - start + 1;
                if (len > maxl) maxi = start, maxl = len;
            }
        }
        return s.substr(maxi, maxl);
    }
    //O(N) (Manacher's Algorithm) over the implicitly '#'-padded string of
    // length 2N+1: even positions are virtual separators, odd are chars.
    string longestPalindrome_4(string s) {
        int N = s.size();
        int dp[2 * N + 1];   // dp[i]: palindrome radius (incl. center) at padded pos i
        int id = 0, mx = 0;  // center and right edge of the rightmost palindrome
        for (int i = 0; i < 2 * N + 1; ++i)
        {
            int j = 2 * id - i;   // mirror of i around the current center
            dp[i] = mx > i ? min(dp[j], mx - i) : 1;
            int left = i - dp[i], right = i + dp[i];
            for (; left >= 0 && right <= 2 * N; left--, right++)
            {
                if (left % 2 == 0 || s[left/2] == s[right/2]) // padding or char
                    dp[i]++;
                else
                    break;
            }
            if (i + dp[i] > mx)
            {
                id = i;
                mx = id + dp[id];
            }
        }
        int res = 0;
        for (int i = 1; i < 2 * N + 1; ++i)
            if (dp[i] > dp[res])
                res = i;
        return s.substr(res / 2 - (dp[res] - 1) / 2, dp[res] - 1);
    }
    /*
    //manacher's O(N) my implementation
    string longestPalindrome2(string s) {
        //pad the origin string
        stringstream ss;
        ss<<"^#";
        for(int i =0 ; i<s.size(); ++i)
            ss<<s[i]<<"#";
        ss<<"$";
        string text = ss.str();
        //iteratively find longest substring
        vector<int> lens(text.size(),1);
        int c = 1; //center
        int r = 1; //right-side
        for(int i = 2 ; i < text.size(); ++i){
            int j = 2*c-i;
            int l = min(lens[j],max(0,r-i));
            while(i-l>=0&&i+l<text.size()&&text[i-l]==text[i+l]) ++l;
            lens[i] = l;
            if(i+l>=c){
                c = i;
                r = i + l - 1;
            }
        }
        //find max len
        int radius = 0;
        int center = 0;
        for(int i = 0 ; i < lens.size(); ++i){
            if(lens[i] > radius){
                center = i;
                radius = lens[i];
            }
        }
        //trim
        string subs = text.substr(center-radius+1,radius*2-1);
        //cout << subs << endl;
        stringstream ss2;
        for(int i =0 ; i<subs.size(); ++i){
            if(subs[i]!='#'&&subs[i]!='^'&&subs[i]!='$') ss2<<subs[i];
        }
        string retStr = ss2.str();
        return retStr;
    }
    */
};
<file_sep>/*
Given a collection of integers that might contain duplicates, S, return all possible subsets.
Note:
Elements in a subset must be in non-descending order.
The solution set must not contain duplicate subsets.
For example,
If S = [1,2,2], a solution is:
[
[2],
[1],
[1,2,2],
[2,2],
[1,2],
[]
]
*/
class Solution {
public:
    // Backtracking over sorted S: every partial selection is itself a
    // subset, so it is recorded on entry. A duplicate value may only be
    // chosen when its equal left neighbour is currently in the subset,
    // which produces each distinct subset exactly once.
    void subsets(vector<int> &S, int index, vector<int>& current,
                 vector<vector<int> >& out, vector<bool>& used) {
        out.push_back(current);
        for (int pos = index; pos < (int)S.size(); ++pos) {
            if (pos > 0 && S[pos] == S[pos-1] && !used[pos-1])
                continue;   // duplicate whose predecessor is idle: skip
            used[pos] = true;
            current.push_back(S[pos]);
            subsets(S, pos + 1, current, out, used);
            current.pop_back();
            used[pos] = false;
        }
    }
    // Returns all unique subsets of S; S is sorted in place first so
    // equal values become adjacent.
    vector<vector<int> > subsetsWithDup(vector<int> &S) {
        vector<vector<int> > out;
        vector<int> current;
        vector<bool> used(S.size(), false);
        sort(S.begin(), S.end());
        subsets(S, 0, current, out, used);
        return out;
    }
};<file_sep>/*
Given an absolute path for a file (Unix-style), simplify it.
For example,
path = "/home/", => "/home"
path = "/a/./b/../../c/", => "/c"
click to show corner cases.
Corner Cases:
Did you consider the case where path = "/../"?
In this case, you should return "/".
Another corner case is the path might contain multiple slashes '/' together, such as "/home//foo/".
In this case, you should ignore redundant slashes and return "/home/foo".
*/
class Solution {
public:
    // Collapses a Unix-style absolute path: squeezes runs of '/', drops
    // "." components, and resolves ".." against a component stack
    // (".." above the root stays at the root).
    // "/../" -> "/", "/home//foo/" -> "/home/foo".
    string simplifyPath(string path) {
        vector<string> stack;            // components of the simplified path
        assert(path[0]=='/'); //assume absolute path is given
        size_t i = 0;
        while (i < path.size())
        {
            // skip runs of '/'. FIX: the bound check now comes before the
            // indexing; the original evaluated path[i] first, touching
            // path[size()] at the end of the string.
            while (i < path.size() && path[i] == '/') i++;
            if (i == path.size())
                break;
            size_t start = i;
            while (i < path.size() && path[i] != '/') i++;   // component end
            string element = path.substr(start, i - start);
            if (element == "..")
            {
                if (stack.size() > 0)
                    stack.pop_back();    // go up one level, if possible
            }
            else if (element != ".") {
                stack.push_back(element);
            }
        }
        if (stack.size() == 0) return "/";
        string simpPath;
        for (size_t k = 0; k < stack.size(); k++)
            simpPath += "/" + stack[k];
        return simpPath;
    }
};<file_sep>/*
Given a sorted array of integers, find the starting and ending position of a given target value.
Your algorithm's runtime complexity must be in the order of O(log n).
If the target is not found in the array, return [-1, -1].
For example,
Given [5, 7, 7, 8, 8, 10] and target value 8,
return [3, 4].
*/
class Solution {
public:
    // Binary-searches both boundaries of `target` in the sorted array A.
    // Returns {first, last} indices, or {-1, -1} when absent. O(log n).
    vector<int> searchRange(int A[], int n, int target) {
        vector<int> bounds(2, -1);
        const int lo = lower_bound(A, n, target);
        const int hi = upper_bound(A, n, target);
        if (lo != hi) {   // at least one occurrence exists
            bounds[0] = lo;
            bounds[1] = hi - 1;
        }
        return bounds;
    }
    // First index in [0, n] whose value is >= target.
    int lower_bound(int A[], int n, int target) {
        int lo = 0, hi = n;
        while (lo < hi) {
            const int mid = lo + (hi - lo) / 2;   // avoids (lo+hi) overflow
            if (A[mid] < target)
                lo = mid + 1;
            else
                hi = mid;
        }
        return lo;
    }
    // First index in [0, n] whose value is > target.
    int upper_bound(int A[], int n, int target) {
        int lo = 0, hi = n;
        while (lo < hi) {
            const int mid = lo + (hi - lo) / 2;
            if (A[mid] <= target)
                lo = mid + 1;
            else
                hi = mid;
        }
        return lo;
    }
};
<file_sep>/*
You are given a string, S, and a list of words, L, that are all of the same length. Find all starting indices of substring(s) in S that is a concatenation of each word in L exactly once and without any intervening characters.
For example, given:
S: "barfoothefoobarman"
L: ["foo", "bar"]
You should return the indices: [0,9].
(order does not matter).
*/
class Solution {
public:
    // For every candidate start in S, greedily consume fixed-length words,
    // counting each against the multiset of words in L. The start is
    // reported when all |L| words are matched exactly as often as required.
    vector<int> findSubstring(string S, vector<string> &L) {
        vector<int> starts;
        if (S.empty() || L.empty() || L[0].empty())
            return starts;
        const int wordLen = (int)L[0].size();
        const int total = wordLen * (int)L.size();
        if ((int)S.size() < total)
            return starts;   // S is too short to hold the concatenation
        unordered_map<string,int> needs;   // word -> required count
        for (size_t w = 0; w < L.size(); ++w)
            needs[L[w]]++;
        for (int begin = 0; begin <= (int)S.size() - total; ++begin) {
            unordered_map<string,int> seen;
            size_t matched = 0;
            while (matched < L.size()) {
                const string word = S.substr(begin + matched * wordLen, wordLen);
                if (needs.count(word) == 0)
                    break;                     // not a dictionary word
                if (++seen[word] > needs[word])
                    break;                     // word used too often
                ++matched;
            }
            if (matched == L.size())
                starts.push_back(begin);
        }
        return starts;
    }
};<file_sep>/*
Given preorder and inorder traversal of a tree, construct the binary tree.
Note:
You may assume that duplicates do not exist in the tree
*/
/**
* Definition for binary tree
* struct TreeNode {
* int val;
* TreeNode *left;
* TreeNode *right;
* TreeNode(int x) : val(x), left(NULL), right(NULL) {}
* };
*/
class Solution {
public:
    // Builds the subtree for preorder[preb, pree) / inorder[inb, ine)
    // (half-open ranges). The first preorder value is the subtree's root;
    // its position in inorder splits the remaining values into the left
    // and right subtrees.
    TreeNode *buildTree(vector<int> &preorder, int preb, int pree,
                        vector<int> &inorder, int inb, int ine){
        if(preb>=pree)
            return NULL;
        int pivot = preorder[preb];   // root value of this subtree
        int mid = inb;
        while(inorder[mid]!=pivot)    // locate the root in inorder (no duplicates)
            ++mid;
        int leftSize = mid-inb;       // node count of the left subtree
        TreeNode* node = new TreeNode(pivot);
        node->left = buildTree(preorder,preb+1,preb+leftSize+1,inorder,inb,inb+leftSize);
        node->right = buildTree(preorder,preb+leftSize+1,pree,inorder,inb+leftSize+1,ine );
        return node;
    }
    // Entry point: reconstructs the tree from its preorder and inorder
    // traversals; assumes the values contain no duplicates.
    TreeNode *buildTree(vector<int> &preorder, vector<int> &inorder) {
        if(preorder.size()!=inorder.size())
            return NULL;
        if(preorder.size()==0)
            return NULL;
        //off by one convention: ranges are half-open [begin, end)
        return buildTree(preorder,0,preorder.size(),inorder,0,inorder.size());
    }
};<file_sep>/*
The n-queens puzzle is the problem of placing n queens on an n×n chessboard such that no two queens attack each other.
Given an integer n, return all distinct solutions to the n-queens puzzle.
Each solution contains a distinct board configuration of the n-queens' placement, where 'Q' and '.' both indicate a queen and an empty space respectively.
For example,
There exist two distinct solutions to the 4-queens puzzle:
[
[".Q..", // Solution 1
"...Q",
"Q...",
"..Q."],
["..Q.", // Solution 2
"Q...",
"...Q",
".Q.."]
]
*/
class Solution {
public:
    // A queen can sit at (row, col) iff no queen in an earlier column
    // shares its row or either left-pointing diagonal; only columns < col
    // are occupied during the search.
    bool isSafe(vector<string> &board, int row, int col) {
        const int N = (int)board.size();
        for (int j = 0; j < col; ++j)                          // same row, to the left
            if (board[row][j] == 'Q') return false;
        for (int i = row, j = col; i >= 0 && j >= 0; --i, --j) // upper-left diagonal
            if (board[i][j] == 'Q') return false;
        for (int i = row, j = col; i < N && j >= 0; ++i, --j)  // lower-left diagonal
            if (board[i][j] == 'Q') return false;
        return true;
    };
    // Backtracks column by column, trying each safe row; complete boards
    // are appended to `result`.
    void solveNQueensHelper(vector<string> &board, int col, vector<vector<string> > &result) {
        const int N = (int)board.size();
        if (col == N) {
            result.push_back(board);
            return;
        }
        for (int row = 0; row < N; ++row) {
            if (!isSafe(board, row, col))
                continue;
            board[row][col] = 'Q';
            solveNQueensHelper(board, col + 1, result);
            board[row][col] = '.';   // undo and try the next row
        }
    };
    // Returns every distinct n-queens board as n strings of '.' and 'Q'.
    vector<vector<string> > solveNQueens(int n) {
        vector<vector<string> > result;
        vector<string> board(n, string(n, '.'));
        solveNQueensHelper(board, 0, result);
        return result;
    }
};<file_sep>/*
Given a linked list and a value x, partition it such that all nodes less than x come before nodes greater than or equal to x.
You should preserve the original relative order of the nodes in each of the two partitions.
For example,
Given 1->4->3->2->5->2 and x = 3,
return 1->2->2->4->3->5.
*/
/**
* Definition for singly-linked list.
* struct ListNode {
* int val;
* ListNode *next;
* ListNode(int x) : val(x), next(NULL) {}
* };
*/
class Solution {
public:
    // Stable partition of a list around x: nodes with val < x keep their
    // relative order in a "front" chain, the rest in a "back" chain; the
    // chains are then joined and the temporary dummy heads freed.
    ListNode *partition(ListNode *head, int x) {
        ListNode *frontHead = new ListNode(-1), *frontTail = frontHead;
        ListNode *backHead = new ListNode(-1), *backTail = backHead;
        for (ListNode *node = head; node != NULL; node = node->next) {
            if (node->val < x) {
                frontTail->next = node;
                frontTail = node;
            } else {
                backTail->next = node;
                backTail = node;
            }
        }
        backTail->next = NULL;                  // terminate the combined list
        frontTail->next = deleteNode(backHead); // splice back chain after front
        return deleteNode(frontHead);
    }
    // Frees curNode and returns its successor (drops a dummy head).
    ListNode * deleteNode(ListNode * curNode) {
        ListNode * toDel = curNode;
        curNode = curNode->next;
        delete toDel;
        return curNode;
    }
};<file_sep>/*
Given a m x n matrix, if an element is 0, set its entire row and column to 0. Do it in place.
click to show follow up.
Follow up:
Did you use extra space?
A straight forward solution using O(mn) space is probably a bad idea.
A simple improvement uses O(m + n) space, but still not the best solution.
Could you devise a constant space solution?
*/
class Solution {
public:
    // Zeroes every row and column of `matrix` that contains a 0, in place.
    // Delegates to the O(1)-extra-space version (setZeroes2).
    void setZeroes(vector<vector<int> > &matrix) {
        if(matrix.size()==0||matrix[0].size()==0)
            return ;
        //constant memory
        setZeroes2(matrix);
        return;
        // NOTE: everything below this `return` is unreachable -- it is the
        // earlier O(m+n)-space version, kept in place for reference.
        vector<bool> rows(matrix.size(),false);
        vector<bool> cols(matrix[0].size(),false);
        for(int i = 0; i < matrix.size(); ++i){
            for(int j = 0 ;j < matrix[i].size(); ++j){
                if(matrix[i][j]==0){
                    rows[i]=true;
                    cols[j]=true;
                }
            }
        }
        //set rows
        for(int i = 0; i < rows.size(); ++i){
            if(rows[i]){
                for(int j = 0; j < matrix[i].size(); ++j){
                    matrix[i][j] = 0;
                }
            }
        }
        //set cols
        for(int j = 0; j < cols.size(); ++j){
            if(cols[j]){
                for(int i = 0; i < matrix.size(); ++i){
                    matrix[i][j] = 0;
                }
            }
        }
    }
    //first row and first column are reused as the zero markers for their
    //column/row; fr/fc remember whether they must be zeroed themselves.
    void setZeroes2(vector<vector<int> > &matrix) {
        int M = matrix.size(), N = matrix[0].size();
        // does the first row itself contain a 0?
        int fr = 1;
        for (int j = 0; j < N; j++) {
            if (matrix[0][j] == 0) {
                fr = 0;
                break;
            }
        }
        // does the first column itself contain a 0?
        int fc = 1;
        for (int i = 0; i < M; i++) {
            if (matrix[i][0] == 0) {
                fc = 0;
                break;
            }
        }
        // record each interior 0 in its row/column header cell
        for (int i = 1; i < M; i++) {
            for (int j = 1; j < N; j++) {
                if (matrix[i][j] == 0) matrix[i][0] = matrix[0][j] = 0;
            }
        }
        // zero every interior cell whose row or column header is marked
        for (int i = 1; i < M; i++) {
            for (int j = 1; j < N; j++) {
                if (matrix[i][0] == 0 || matrix[0][j] == 0) matrix[i][j] = 0;
            }
        }
        // finally zero the first row/column if they originally held a 0
        if (fr == 0) {
            for (int j = 0; j < N; j++) matrix[0][j] = 0;
        }
        if (fc == 0) {
            for (int i = 0; i < M; i++) matrix[i][0] = 0;
        }
    }
};<file_sep>/*
Given a binary tree, check whether it is a mirror of itself (ie, symmetric around its center).
For example, this binary tree is symmetric:
1
/ \
2 2
/ \ / \
3 4 4 3
But the following is not:
1
/ \
2 2
\ \
3 3
Note:
Bonus points if you could solve it both recursively and iteratively.
confused what "{1,#,2,3}" means? Read on below for how a binary tree is serialized on OJ.
OJ's Binary Tree Serialization:
The serialization of a binary tree follows a level order traversal, where '#' signifies a path terminator where no node exists below.
Here's an example:
1
/ \
2 3
/
4
\
5
The above binary tree is serialized as "{1,2,3,#,#,4,#,#,5}".
*/
/**
* Definition for binary tree
* struct TreeNode {
* int val;
* TreeNode *left;
* TreeNode *right;
* TreeNode(int x) : val(x), left(NULL), right(NULL) {}
* };
*/
class Solution {
public:
//recursive
bool isSymmetric(TreeNode * l, TreeNode * r) {
if( (l==NULL) ^ (r==NULL) )
return false;
if(!l)
return true;
if(l->val!=r->val)
return false;
return isSymmetric(l->left,r->right) && isSymmetric(l->right,r->left);
}
bool isSymmetric(TreeNode *root) {
if(root==NULL)
return true;
//return isSymmetric(root->left,root->right);
return isSymmetric2(root);
}
//iterative
bool isSymmetric2(TreeNode *root){
queue<TreeNode*> q;
q.push(root);
int size = 1;
vector<int> v;
while(!q.empty()){
TreeNode* n = q.front();
q.pop();
--size;
if(n!=NULL){
v.push_back(n->val);
q.push( n->left?n->left: NULL);
q.push( n->right?n->right: NULL);
}
else{
v.push_back('#');
}
if(size==0){
size=q.size();
for(int i = 0 , j = v.size()-1; i<j ; ++i,--j){
if(v[i]!=v[j])
return false;
}
v.clear();
}
}
return true;
}
};
<file_sep>/*
Implement next permutation, which rearranges numbers into the lexicographically next greater permutation of numbers.
If such arrangement is not possible, it must rearrange it as the lowest possible order (ie, sorted in ascending order).
The replacement must be in-place, do not allocate extra memory.
Here are some examples. Inputs are in the left-hand column and its corresponding outputs are in the right-hand column.
1,2,3 → 1,3,2
3,2,1 → 1,2,3
1,1,5 → 1,5,1
*/
class Solution {
public:
    // Rearrange num into the lexicographically next permutation, in
    // place; wraps to fully ascending order when num is already the
    // largest permutation.
    void nextPermutation(vector<int> &num) {
        int n = num.size();
        if (n < 2)
            return;
        // Rightmost index whose value is smaller than its successor.
        int pivot = n - 2;
        while (pivot >= 0 && num[pivot] >= num[pivot + 1])
            --pivot;
        if (pivot < 0) {
            // Entire array is non-increasing: wrap around.
            reverse(num.begin(), num.end());
            return;
        }
        // First index right of pivot holding a value <= num[pivot];
        // its predecessor is the smallest value strictly greater.
        int stop = pivot + 1;
        while (stop < n && num[stop] > num[pivot])
            ++stop;
        swap(num[pivot], num[stop - 1]);
        // The suffix is descending; reverse it to the minimal order.
        reverse(num.begin() + pivot + 1, num.end());
    }
};<file_sep>/*
Given a string s, partition s such that every substring of the partition is a palindrome.
Return the minimum cuts needed for a palindrome partitioning of s.
For example, given s = "aab",
Return 1 since the palindrome partitioning ["aa","b"] could be produced using 1 cut.
*/
class Solution {
public:
    // Minimum number of cuts so every piece of s is a palindrome.
    // Bottom-up DP: isPal[j][i] caches whether s[j..i] is a palindrome;
    // cuts[i] is the best answer for the prefix ending at index i.
    int minCut(string s) {
        int n = s.size();
        if (n < 2)
            return 0;
        vector<vector<bool> > isPal(n, vector<bool>(n, false));
        vector<int> cuts(n, INT_MAX);
        for (int i = 0; i < n; ++i) {
            for (int j = i; j >= 0; --j) {
                // s[j..i] is a palindrome when the ends match and the
                // interior is empty, single, or itself a palindrome.
                bool innerOk = (i - j < 2) || isPal[j + 1][i - 1];
                if (s[j] == s[i] && innerOk) {
                    isPal[j][i] = true;
                    // Whole prefix palindromic -> zero cuts; otherwise
                    // one extra cut after the best prefix ending at j-1.
                    cuts[i] = (j == 0) ? 0 : std::min(cuts[i], cuts[j - 1] + 1);
                }
            }
        }
        return cuts[n - 1];
    }
};
<file_sep>/*
Given n, how many structurally unique BST's (binary search trees) that store values 1...n?
For example,
Given n = 3, there are a total of 5 unique BST's.
1 3 3 2 1
\ / / / \ \
3 2 1 1 3 2
/ / \ \
2 1 2 3
*/
class Solution {
public:
    // Memo table: n -> number of structurally unique BSTs on n nodes.
    unordered_map<int,int> cache;
    // Catalan-number recurrence: pick each value as the root; with i
    // nodes in the left subtree and n-i-1 in the right, the counts
    // multiply. (Removed an unused local `left` from the loop body.)
    int numTrees(int n) {
        if(n<=1)
            return 1;
        if(cache.count(n))
            return cache[n];
        int num = 0;
        for(int i = 0; i<n; ++i){
            num += numTrees(i)*numTrees(n - i - 1);
        }
        cache[n] = num;
        return num;
    }
};<file_sep>/*
Given a sorted linked list, delete all duplicates such that each element appear only once.
For example,
Given 1->1->2, return 1->2.
Given 1->1->2->3->3, return 1->2->3.
*/
/**
* Definition for singly-linked list.
* struct ListNode {
* int val;
* ListNode *next;
* ListNode(int x) : val(x), next(NULL) {}
* };
*/
class Solution {
public:
    // Collapse runs of equal values in a sorted list so each value
    // appears exactly once; duplicate nodes are freed.
    ListNode *deleteDuplicates(ListNode *head) {
        return deleteDuplicates1(head);
    }
    //recursive
    // Keep head if it differs from its successor, otherwise free head
    // and recurse on the rest. NOTE(review): recurses through the public
    // entry point deleteDuplicates(), which currently dispatches to the
    // iterative variant — equivalent results either way.
    ListNode *deleteDuplicates2(ListNode *head) {
        if (!head || !(head->next) ) return head;
        if ( head->val != head->next->val) {
            head->next = deleteDuplicates(head->next);
            return head;
        }
        ListNode* next = head->next;
        delete head;
        return deleteDuplicates(next);
    }
    //iterative
    // `tail` is the last node of the deduplicated prefix; any node that
    // repeats tail's value is unlinked and freed on the spot.
    ListNode *deleteDuplicates1(ListNode *head) {
        if(head==NULL||head->next==NULL)
            return head;
        ListNode* tail = head;
        int tailVal = tail->val;
        ListNode* cur = head->next;
        while(cur){
            if(cur->val!=tailVal){
                tail->next = cur;       // link the next distinct node
                tail=tail->next;
                tailVal = tail->val;
                cur=cur->next;
            }else{
                ListNode* n = cur;      // duplicate: free it
                cur=cur->next;
                delete n;
            }
        }
        tail->next = NULL;              // terminate the result list
        return head;
    }
};<file_sep>/*
Given a string, find the length of the longest substring without repeating characters. For example, the longest substring without repeating letters for "abcabcbb" is "abc", which the length is 3. For "bbbbb" the longest substring is "b", with the length of 1.
*/
class Solution {
public:
    // Length of the longest substring of s with no repeated character.
    // Keeps the current window as a char->index map; when a repeat is
    // found, every character up to and including the prior occurrence
    // is dropped from the window.
    int lengthOfLongestSubstring(string s) {
        unordered_map<char,int> window;  // char -> its index in the window
        int best = 0;
        int curLen = 0;
        for (int idx = 0; idx < (int)s.size(); ++idx) {
            char c = s[idx];
            unordered_map<char,int>::iterator hit = window.find(c);
            if (hit == window.end()) {
                // New character: the window simply grows.
                window[c] = idx;
                ++curLen;
                if (curLen > best)
                    best = curLen;
            } else {
                // Repeat: evict everything from the window start up to
                // (and including) the earlier occurrence of c.
                int prev = hit->second;
                int windowStart = idx - curLen;
                for (int k = prev; k >= windowStart; --k)
                    window.erase(s[k]);
                window[c] = idx;
                curLen = window.size();
            }
        }
        return best;
    }
};<file_sep>/*
You are given an n x n 2D matrix representing an image.
Rotate the image by 90 degrees (clockwise).
Follow up:
Could you do this in-place?
*/
class Solution {
public:
    // Rotate the N x N matrix 90 degrees clockwise, in place, by cycling
    // four cells at a time around each concentric "ring".
    void rotate(vector<vector<int> > &matrix) {
        if (matrix.empty()) return;
        int N = matrix.size();
        for (int i = 0; i <= N/2; i++) {
            int first = i, last = N-i-1;   // bounds of the current ring
            for (int j = first; j < last; j++) {
                int top = matrix[first][j], offset = j-first;
                // left -> top, bottom -> left, right -> bottom, top -> right
                matrix[first][j] = matrix[last-offset][first];
                matrix[last-offset][first] = matrix[last][last-offset];
                matrix[last][last-offset] = matrix[j][last];
                matrix[j][last] = top;
            }
        }
    }
    /*
    void rotate(vector<vector<int> > &matrix) {
        int N = matrix.size();
        if(N<=1)
            return;
        for(int i = 0; i < N/2; ++i){
            int b = i;
            int e = N-i-1;
            for(int l = 0; b+l<e ; ++l){
                int tmp = matrix[b][b+l];
                matrix[b][b+l] = matrix[e-l][b];
                matrix[e-l][b] = matrix[e][e-l];
                matrix[e][e-l] = matrix[b+l][e];
                matrix[b+l][e] = tmp;
            }
        }
        return;
    }
    */
};
};<file_sep>/*
Given an integer n, generate a square matrix filled with elements from 1 to n2 in spiral order.
For example,
Given n = 3,
You should return the following matrix:
[
[ 1, 2, 3 ],
[ 8, 9, 4 ],
[ 7, 6, 5 ]
]
*/
class Solution {
public:
    // Fill an n x n matrix with 1..n*n in clockwise spiral order.
    // Walks cell by cell; turns right whenever the next cell would leave
    // the matrix or land on an already-filled (non-zero) cell.
    vector<vector<int> > generateMatrix(int n) {
        if(n<=0)
            return vector<vector<int> >();
        vector<vector<int> > matrix(n,vector<int>(n,0));
        // Direction cycle: right, down, left, up.
        vector<pair<int,int> > dirs;
        dirs.push_back(pair<int,int>(0,1));
        dirs.push_back(pair<int,int>(1,0));
        dirs.push_back(pair<int,int>(0,-1));
        dirs.push_back(pair<int,int>(-1,0));
        int i = 0;
        int j = 0;
        int cur = 0;  // index into dirs
        for(int k = 1; k <= n*n; ++k){
            matrix[i][j] = k; //set value and mark the border
            pair<int,int> curDir = dirs[cur];
            int nextI = i + curDir.first;
            int nextJ = j + curDir.second;
            if( nextI>=matrix.size()||nextI<0 ||
                nextJ >=matrix[0].size() || nextJ < 0 ||
                matrix[nextI][nextJ]!=0){
                // Blocked: turn clockwise and recompute the step.
                // (The inner curDir intentionally shadows the outer one.)
                cur = (cur+1)%dirs.size();
                pair<int,int> curDir = dirs[cur];
                nextI = i + curDir.first;
                nextJ = j + curDir.second;
            }
            i = nextI;
            j = nextJ;
        }
        return matrix;
    }
};<file_sep>/*
Given n non-negative integers representing the histogram's bar height where the width of each bar is 1, find the area of largest rectangle in the histogram.
Above is a histogram where width of each bar is 1, given height = [2,1,5,6,2,3].
The largest rectangle is shown in the shaded area, which has area = 10 unit.
For example,
Given height = [2,1,5,6,2,3],
return 10.
*/
class Solution {
public:
    // Largest rectangle in a histogram. For each bar i, a monotonic
    // stack computes how many contiguous bars at least as tall extend
    // to its left (left[i]) and right (right[i]); the answer is the
    // best height[i] * (left[i] + 1 + right[i]).
    int largestRectangleArea(vector<int> &height) {
        if(height.size()==0)
            return 0;
        if(height.size()==1){
            return height[0];
        }
        // left[i]: number of bars immediately left of i with height >= height[i]
        vector<int> left(height.size(),0);
        stack<int> indexes;   // indices with strictly increasing heights
        indexes.push(0);
        for(int i = 1 ; i < left.size(); ++i){
            while(indexes.empty()==false && height[ indexes.top() ] >= height[i] ){
                indexes.pop();
            }
            if(indexes.empty()){
                left[i] = i;  // everything to the left is >= height[i]
            }
            else{
                left[i] = i - indexes.top() - 1 ;
            }
            indexes.push(i);
        }
        // right[i]: same count on the right side, scanning backwards
        vector<int> right(height.size(),0);
        indexes=stack<int>();
        indexes.push(height.size()-1);
        for(int i = right.size()-2; i >= 0 ; --i){
            while(indexes.empty()==false && height[ indexes.top() ] >= height[i] ){
                indexes.pop();
            }
            if(indexes.empty()){
                right[i] = right.size() - i - 1;
            }
            else{
                right[i] = indexes.top() - i - 1 ;
            }
            indexes.push(i);
        }
        int max = 0;
        for(int i = 0 ; i < height.size(); ++i){
            max = std::max(max, height[i]*(left[i]+1+right[i]) );
        }
        return max;
    }
};
<file_sep>/*
Given a list, rotate the list to the right by k places, where k is non-negative.
For example:
Given 1->2->3->4->5->NULL and k = 2,
return 4->5->1->2->3->NULL.
*/
/**
* Definition for singly-linked list.
* struct ListNode {
* int val;
* ListNode *next;
* ListNode(int x) : val(x), next(NULL) {}
* };
*/
class Solution {
public:
    // Rotate the list right by k places: the last k%len nodes move to
    // the front. Two passes: measure the length, then split at
    // position len-k and re-link the halves.
    ListNode *rotateRight(ListNode *head, int k) {
        if(!head){
            return head;
        }
        // First pass: count the nodes.
        int len = 0;
        ListNode* cur = head;
        while(cur){
            cur = cur -> next;
            ++len;
        }
        k = k%len;            // rotating by len is a no-op
        if(k==0){
            return head;
        }
        k = len - k;          // index of the new head
        // Second pass: pre ends at the node before the split point,
        // cur lands on the new head.
        ListNode* pre = head;
        cur = head;
        int i = 0;
        while(i!=k){
            if(i<k-1)
                pre = pre -> next;
            cur = cur ->next;
            ++i;
        }
        pre->next = NULL;     // cut the list at the split point
        ListNode* newHead = cur;
        ListNode* tail = cur;
        while(tail->next){
            tail = tail->next;
        }
        tail->next = head;    // append the old front after the old tail
        return newHead;
    }
};<file_sep>/*
Follow up for problem "Populating Next Right Pointers in Each Node".
What if the given tree could be any binary tree? Would your previous solution still work?
Note:
You may only use constant extra space.
For example,
Given the following binary tree,
1
/ \
2 3
/ \ \
4 5 7
After calling your function, the tree should look like:
1 -> NULL
/ \
2 -> 3 -> NULL
/ \ \
4-> 5 -> 7 -> NULL
*/
/**
* Definition for binary tree with next pointer.
* struct TreeLinkNode {
* int val;
* TreeLinkNode *left, *right, *next;
* TreeLinkNode(int x) : val(x), left(NULL), right(NULL), next(NULL) {}
* };
*/
//any binary tree
class Solution {
public:
    // Populate each node's `next` pointer with its right neighbor on
    // the same level, for an arbitrary (not necessarily perfect) tree.
    void connect(TreeLinkNode *root) {
        connect2(root);
    }
    //queue
    // Level-order BFS: collect one level at a time, then chain the
    // collected nodes (terminated by NULL) left to right.
    void connect1(TreeLinkNode *root) {
        if(root==NULL)
            return;
        queue<TreeLinkNode*> q;
        q.push(root);
        int count = 1;                 // nodes remaining in current level
        vector<TreeLinkNode*> v;       // nodes of the current level
        while( !q.empty() ){
            TreeLinkNode* node = q.front();
            q.pop();
            if(node->left)
                q.push(node->left);
            if(node->right)
                q.push(node->right);
            v.push_back(node);
            --count;
            if(count==0){
                count = q.size();      // next level's size
                v.push_back(NULL);     // sentinel so the last node gets next=NULL
                for(int i = 0 ; i < v.size()-1; ++i){
                    v[i]->next=v[i+1];
                }
                v.clear();
            }
        }
    }
    //no queue
    // Constant-space recursion; relies on the parent level's next
    // pointers already being wired, which is why the right subtree is
    // recursed before the left.
    void connect2(TreeLinkNode *root) {
        if(root==NULL)
            return;
        // If the right child exists, the left child's next is the right
        // child. Otherwise, walk root's next chain to find the first
        // node that has at least one child.
        if (root->left != NULL) {
            if (root->right != NULL) {
                root->left->next = root->right;
            }
            else {
                TreeLinkNode* p = root->next;
                while (p != NULL && p->left == NULL && p->right == NULL)
                    p = p->next;
                if (p != NULL)
                    root->left->next = p->left == NULL ? p->right : p->left;
            }
        }
        // The right child's next comes from the first node in root's
        // next chain that has a non-null child.
        if (root->right != NULL) {
            TreeLinkNode* p = root->next;
            while (p != NULL && p->left == NULL && p->right == NULL)
                p = p->next;
            if (p != NULL)
                root->right->next = p->left == NULL ? p->right : p->left;
        }
        connect(root->right);
        connect(root->left);
    }
};<file_sep>/*
Given a m x n grid filled with non-negative numbers, find a path from top left to bottom right which minimizes the sum of all numbers along its path.
Note: You can only move either down or right at any point in time.
*/
class Solution {
public:
    // Minimum path sum from the top-left to the bottom-right cell,
    // moving only right or down. The grid itself is reused as the DP
    // table: each cell accumulates the cheapest way to reach it.
    int minPathSum(vector<vector<int> > &grid) {
        if (grid.empty() || grid[0].empty())
            return 0;
        int rows = grid.size();
        int cols = grid[0].size();
        for (int r = 0; r < rows; ++r) {
            for (int c = 0; c < cols; ++c) {
                if (r == 0 && c == 0)
                    continue;   // start cell keeps its own cost
                // Missing neighbors (first row/column) cost "infinity".
                int fromTop  = (r > 0) ? grid[r-1][c] : INT_MAX;
                int fromLeft = (c > 0) ? grid[r][c-1] : INT_MAX;
                grid[r][c] += min(fromTop, fromLeft);
            }
        }
        return grid[rows-1][cols-1];
    }
};<file_sep>/*
Say you have an array for which the ith element is the price of a given stock on day i.
Design an algorithm to find the maximum profit. You may complete at most two transactions.
Note:
You may not engage in multiple transactions at the same time (ie, you must sell the stock before you buy again).
*/
class Solution {
public:
    // Best profit with at most two non-overlapping transactions.
    // profitUtil[i]: best single-transaction profit within days [0..i];
    // profitFrom[i]: best single-transaction profit within days [i..end].
    // The answer is the best of one transaction or a split at some day.
    int maxProfit(vector<int> &prices) {
        if(prices.size()<2){
            return 0;
        }
        vector<int> profitUtil(prices.size(),0);
        vector<int> profitFrom(prices.size(),0);
        // Forward sweep: cheapest buy so far -> best sell by day i.
        int min = prices[0];
        int profit = 0;
        for(int i= 1; i < prices.size(); ++i){
            if(prices[i]<min){
                min=prices[i];
            }
            else{
                profit = std::max(profit,prices[i]-min);
            }
            profitUtil[i] = profit;
        }
        // Backward sweep: highest sell ahead -> best buy from day i.
        profit = 0;
        int max= prices[prices.size()-1];
        for(int i = prices.size()-2; i >=0 ; --i){
            if(prices[i]>max){
                max = prices[i];
            }
            else{
                profit = std::max(profit, max - prices[i]);
            }
            profitFrom[i] = profit;
        }
        //once transaction
        int maxOne = profit;   // == profitFrom[0], best single transaction
        //at most twice
        int maxTwo = 0;
        for(int i = 1; i < prices.size()-2; ++i){
            maxTwo = std::max(maxTwo, profitUtil[i]+profitFrom[i+1] );
        }
        return std::max(maxOne,maxTwo);
    }
};
<file_sep>/*
Design and implement a data structure for Least Recently Used (LRU) cache. It should support the following operations: get and set.
get(key) - Get the value (will always be positive) of the key if the key exists in the cache, otherwise return -1.
set(key, value) - Set or insert the value if the key is not already present. When the cache reached its capacity, it should invalidate the least recently used item before inserting a new item.
*/
class LRUCache{
public:
    // LRU cache. _list holds (key,value) pairs ordered most-recent
    // first; _map points each key at its node in _list for O(1) access.
    LRUCache(int capacity) {
        _capacity = capacity;
        _size = 0;
    }
    // Return key's value (marking it most recently used), or -1.
    int get(int key) {
        if (_map.find(key) == _map.end()) return -1;
        auto it = _map[key];
        // Move the entry to the front of the recency list.
        _list.push_front(*it);
        _list.erase(it);
        it = _list.begin();
        _map[key] = it;
        return it->second;
    }
    // Insert or overwrite key. At capacity, evict the least recently
    // used entry (the list tail) first.
    void set(int key, int value) {
        auto it = _map.find(key);
        if (it != _map.end()) {
            // BUGFIX: erase the list node BEFORE erasing the map entry.
            // The old code called _map.erase(key) first, invalidating
            // `it`, then dereferenced it->second — undefined behavior.
            _list.erase(it->second);
            _map.erase(it);
            _size--;
        }
        else if (_size == _capacity) {
            _map.erase(_list.back().first);
            _list.pop_back();
            _size--;
        }
        _list.push_front(make_pair(key, value));
        _map[key] = _list.begin();
        _size++;
    }
    list<pair<int, int> > _list;
    unordered_map<int, list<pair<int, int>>::iterator> _map;
    int _size;
    int _capacity;
};<file_sep>/*
The count-and-say sequence is the sequence of integers beginning as follows:
1, 11, 21, 1211, 111221, ...
1 is read off as "one 1" or 11.
11 is read off as "two 1s" or 21.
21 is read off as "one 2, then one 1" or 1211.
Given an integer n, generate the nth sequence.
Note: The sequence of integers will be represented as a string.
*/
class Solution {
public:
    // n-th term of the count-and-say sequence, starting from "1".
    // Each iteration reads the previous term as runs of equal digits
    // and emits "<run length><digit>" for every run.
    string countAndSay(int n) {
        string cur = "1";
        for (int iter = 1; iter < n; ++iter) {
            stringstream next;
            int runStart = 0;
            for (int pos = 1; pos < (int)cur.size(); ++pos) {
                if (cur[pos] != cur[runStart]) {
                    // Run ended: say its length, then its digit.
                    next << pos - runStart << cur[runStart];
                    runStart = pos;
                }
            }
            // Flush the final run.
            next << (int)cur.size() - runStart << cur[runStart];
            cur = next.str();
        }
        return cur;
    }
};<file_sep>/*
Given a collection of candidate numbers (C) and a target number (T), find all unique combinations in C where the candidate numbers sums to T.
Each number in C may only be used once in the combination.
Note:
All numbers (including target) will be positive integers.
Elements in a combination (a1, a2, … , ak) must be in non-descending order. (ie, a1 ≤ a2 ≤ … ≤ ak).
The solution set must not contain duplicate combinations.
For example, given candidate set 10,1,2,7,6,1,5 and target 8,
A solution set is:
[1, 7]
[1, 2, 5]
[2, 6]
[1, 1, 6]
*/
class Solution {
public:
    // Accumulated result set. NOTE(review): member state persists
    // across calls, so a Solution instance is single-use.
    vector<vector<int> > vv;
    // Backtracking over the sorted candidates: at each index either
    // take the value (if allowed) or skip it. Duplicates are kept
    // unique by requiring that equal earlier candidates were taken.
    void combinationSum(vector<int> &candidates, int target, int index, int sum,
            vector<int>& v ,vector<bool>& used){
        if(sum==target){
            vv.push_back(v);
            return;
        }
        if(sum>target||index>=candidates.size()){
            return;
        }
        //previous same ones must be used if it exists
        bool flag = true;
        if( index!=0 && (candidates[index-1]==candidates[index] && used[index-1]==false ) ){
            flag = false;//previous same one is not used
        }
        if(flag){
            // Branch 1: include candidates[index].
            v.push_back(candidates[index]);
            used[index]=true;
            combinationSum(candidates,target,index+1,sum+candidates[index],v,used);
            v.pop_back();
            used[index]=false;
        }
        // Branch 2: skip candidates[index].
        combinationSum(candidates,target,index+1,sum,v,used);
    }
    // Entry point: sort so equal candidates are adjacent, then backtrack.
    vector<vector<int> > combinationSum2(vector<int> & num, int target) {
        sort(num.begin(),num.end());
        vector<int> v;
        vector<bool> used(num.size(),false);
        combinationSum(num,target,0,0,v,used);
        return vv;
    }
};<file_sep>/*
Given a binary tree, return the bottom-up level order traversal of its nodes' values. (ie, from left to right, level by level from leaf to root).
For example:
Given binary tree {3,9,20,#,#,15,7},
3
/ \
9 20
/ \
15 7
return its bottom-up level order traversal as:
[
[15,7]
[9,20],
[3],
]
confused what "{1,#,2,3}" means? Read on below for how a binary tree is serialized on OJ.
OJ's Binary Tree Serialization:
The serialization of a binary tree follows a level order traversal, where '#' signifies a path terminator where no node exists below.
Here's an example:
1
/ \
2 3
/
4
\
5
The above binary tree is serialized as "{1,2,3,#,#,4,#,#,5}".
*/
/**
* Definition for binary tree
* struct TreeNode {
* int val;
* TreeNode *left;
* TreeNode *right;
* TreeNode(int x) : val(x), left(NULL), right(NULL) {}
* };
*/
class Solution {
public:
    // Pre-order DFS that appends each node's value to the vector for
    // its depth, allocating a new per-level vector on first visit.
    void levelOrder(TreeNode *root, vector<vector<int>* >& vv, int depth){
        if(root==NULL)
            return;
        if(vv.size()<depth){
            vv.push_back(new vector<int>());
        }
        vv[depth-1]->push_back(root->val);
        levelOrder(root->left,vv, depth+1);
        levelOrder(root->right,vv, depth+1);
    }
    // Level-order traversal reported bottom-up: collect levels
    // top-down, then reverse. The heap-allocated per-level vectors are
    // copied into the result and freed here.
    vector<vector<int> > levelOrderBottom(TreeNode *root) {
        vector<vector<int>* > vv;
        levelOrder(root, vv, 1);
        //reverse order
        reverse(vv.begin(),vv.end());
        vector<vector<int> > retVV;
        for(int i = 0; i < vv.size(); ++i){
            retVV.push_back(*(vv[i]));
            delete vv[i];
        }
        return retVV;
    }
};<file_sep>/*
Given a binary tree, return the zigzag level order traversal of its nodes' values. (ie, from left to right, then right to left for the next level and alternate between).
For example:
Given binary tree {3,9,20,#,#,15,7},
3
/ \
9 20
/ \
15 7
return its zigzag level order traversal as:
[
[3],
[20,9],
[15,7]
]
*/
/**
* Definition for binary tree
* struct TreeNode {
* int val;
* TreeNode *left;
* TreeNode *right;
* TreeNode(int x) : val(x), left(NULL), right(NULL) {}
* };
*/
class Solution {
public:
    // Level-order traversal where every second level is reported right
    // to left. Plain BFS with a per-level counter; odd-indexed levels
    // are reversed before being appended to the result.
    vector<vector<int> > zigzagLevelOrder(TreeNode *root) {
        vector<vector<int> > vv;
        if(root==NULL)
            return vv;
        vector<int> v;            // values of the current level
        queue<TreeNode*> q;
        q.push(root);
        int size = 1;             // nodes remaining in the current level
        int level = 1;
        while(!q.empty()){
            TreeNode* n = q.front();
            q.pop();
            --size;
            v.push_back(n->val);
            if(n->left)
                q.push(n->left);
            if(n->right)
                q.push(n->right);
            if(0==size){
                // Level finished: flip every even level for the zigzag.
                if(level%2==0){
                    reverse(v.begin(),v.end());
                }
                vv.push_back(v);
                v.clear();
                size = q.size();
                ++level;
            }
        }
        return vv;
    }
    /*
    void printLevelOrderZigZag(BinaryTree *root) {
        stack<BinaryTree*> currentLevel, nextLevel;
        bool leftToRight = true;
        currentLevel.push(root);
        while (!currentLevel.empty()) {
            BinaryTree *currNode = currentLevel.top();
            currentLevel.pop();
            if (currNode) {
                cout << currNode->data << " ";
                if (leftToRight) {
                    nextLevel.push(currNode->left);
                    nextLevel.push(currNode->right);
                } else {
                    nextLevel.push(currNode->right);
                    nextLevel.push(currNode->left);
                }
            }
            if (currentLevel.empty()) {
                cout << endl;
                leftToRight = !leftToRight;
                swap(currentLevel, nextLevel);
            }
        }
    }
    */
};
};<file_sep>/*
Given two words word1 and word2, find the minimum number of steps required to convert word1 to word2. (each operation is counted as 1 step.)
You have the following 3 operations permitted on a word:
a) Insert a character
b) Delete a character
c) Replace a character
*/
class Solution {
public:
    // Levenshtein edit distance between word1 and word2; insert,
    // delete, and replace each cost one step. Classic O(m*n) DP where
    // dp[i][j] is the distance between word1[0..i) and word2[0..j).
    int minDistance(string word1, string word2) {
        if (word1.empty())
            return word2.size();
        if (word2.empty())
            return word1.size();
        int rows = word1.size() + 1;
        int cols = word2.size() + 1;
        vector<vector<int> > dp(rows, vector<int>(cols, 0));
        // Base cases: transforming to/from the empty prefix.
        for (int i = 0; i < rows; ++i)
            dp[i][0] = i;
        for (int j = 0; j < cols; ++j)
            dp[0][j] = j;
        for (int i = 1; i < rows; ++i) {
            for (int j = 1; j < cols; ++j) {
                int replaceCost = (word1[i-1] == word2[j-1]) ? 0 : 1;
                int viaReplace = dp[i-1][j-1] + replaceCost;
                int viaInsert  = dp[i][j-1] + 1;
                int viaDelete  = dp[i-1][j] + 1;
                dp[i][j] = min(viaReplace, min(viaInsert, viaDelete));
            }
        }
        return dp[rows-1][cols-1];
    }
};<file_sep>/*
The set [1,2,3,…,n] contains a total of n! unique permutations.
By listing and labeling all of the permutations in order,
We get the following sequence (ie, for n = 3):
"123"
"132"
"213"
"231"
"312"
"321"
Given n and k, return the kth permutation sequence.
Note: Given n will be between 1 and 9 inclusive.
*/
class Solution {
public:
    // Return the k-th smallest value (1-based) not yet marked in
    // `used`, marking it as used; values are the 1-based indices.
    int findkthSmall(vector<bool>& used, int k){
        int count = 0;
        for(int i = 0; i < used.size(); ++i){
            if(!used[i])
                ++count;
            if(count == k){
                used[i]=true;
                return i+1;
            }
        }
        return 0;   // unreachable for valid k
    }
    //use math method, very clever
    // Factorial number system: the leading digit of the k-th
    // permutation is determined by k / (n-1)!, the next by the
    // remainder over (n-2)!, and so on.
    string getPermutation(int n, int k) {
        // v[i-1] = i!  (factorial table)
        vector<int> v(n,1);
        for(int i = 2; i <= n ; ++i){
            v[i-1]=i*v[i-2];
        }
        assert(k<=v[n-1]);
        vector<bool> used(n,false);
        stringstream ss;
        for(int i = 0; i < v.size()-1 ; ++i){
            // rank = how many whole (n-i-1)!-sized blocks precede k
            int rank = (k-1) / v[v.size()-i-2];
            k -= rank*v[v.size()-i-2];
            int digit = findkthSmall(used,rank+1);
            ss << digit;
        }
        // Only one unused digit remains for the last position.
        int digit = findkthSmall(used,1);
        ss << digit;
        return ss.str();
        //solution based on next_permutation
        /*
        vector<int> v(n,0);
        for(int i = 1; i <= n ; ++i){
            v[i-1]=i;
        }
        int i = 0;
        while( i++ != k){
            nextPermutation(v);
        }
        stringstream ss;
        for(int i = 1; i <= n ; ++i){
            ss << v[i-1];
        }
        return ss.str();
        */
    }
    // Standard in-place next-permutation (same algorithm as
    // std::next_permutation); kept for the alternative solution above.
    void nextPermutation(vector<int> &num) {
        if(num.size()==0||num.size()==1)
            return;
        int i = num.size()-2;
        for( ; i>=0; --i){
            if(num[i]<num[i+1])
                break;
        }
        //rollback
        if(i==-1){
            reverse(num.begin(),num.end());
            return;
        }
        int j = i+1;
        while(j<num.size()){
            if(num[j]<=num[i])
                break;
            ++j;
        }
        swap(num[i],num[j-1]);
        reverse(num.begin()+i+1, num.end() );
    }
};<file_sep>/*
Given a string s, partition s such that every substring of the partition is a palindrome.
Return all possible palindrome partitioning of s.
For example, given s = "aab",
Return
[
["aa","b"],
["a","a","b"]
]
*/
//O(N^3) with cache, otherwise O(2^N)
class Solution {
public:
    // All ways to split s into palindromic substrings.
    vector<vector<string>> partition(string s) {
        //return partition1(s);
        return partition2(s);
    }
    //Top-down DP
    // Result accumulator. NOTE(review): member state persists across
    // calls, so a Solution instance is single-use for partition1.
    vector<vector<string>> vv;
    vector<vector<string>> partition1(string s) {
        if(s=="")
            return vv;
        vector<string> v;
        partitionHelper(v,s);
        return vv;
    }
    // Try every palindromic prefix, recurse on the suffix.
    // NOTE(review): v and s are passed by value — correct, but each
    // recursion level copies them.
    void partitionHelper(vector<string> v, string s){
        if(s==""){
            vv.push_back(v);
            return ;
        }
        for(int i = 1; i <= s.size(); ++i){
            string prefix = s.substr(0,i);
            string suffix = s.substr(i);
            if(isPalindrome(prefix)){
                v.push_back(prefix);
                partitionHelper(v,suffix);
                v.pop_back();
            }
        }
    }
    // Memoized palindrome test; the static cache lives for the whole
    // program, shared by all Solution instances.
    bool isPalindrome(string s){
        static unordered_map<string,bool> cache; // cache palindrom substring
        if(s=="")
            return true;
        if(cache.count(s)>0)
            return cache[s];
        for(int i = 0 , j = s.size()-1; i<=j; ++i,--j ){
            if(s[i]!=s[j]){
                cache[s]=false;
                return false;
            }
        }
        cache[s]=true;
        return true;
    }
    //Bottom-up DP
    // dp1[j][i] marks palindromic s[j..i]; dp2[i] holds every partition
    // of the prefix ending at i, extended whenever a palindrome ends there.
    vector<vector<string>> partition2(string s) {
        vector<vector<string>> vv;
        if(s=="")
            return vv;
        int N=s.size();
        vector<vector<bool> > dp1(N,vector<bool>(N,false) );//cache isPalindrome
        vector<vector<vector<string> > > dp2(N, vector<vector<string> >());//cache intermediate results
        for(int i = 0; i < N; ++i){
            for(int j = i ;j >= 0 ; --j){
                if(s[i]==s[j]&&(i-j<2||dp1[j+1][i-1])){ // if it palindrome
                    dp1[j][i]=true;
                    int len = i-j+1;
                    if(j==0){
                        // Whole prefix is one palindrome.
                        dp2[i].push_back(vector<string>(1, s.substr(0, len)));
                    }
                    else{
                        // Append this palindrome to each partition of s[0..j).
                        for (int k = 0; k < dp2[j-1].size(); ++k ) {
                            vector<string> p = dp2[j-1][k];
                            p.push_back(s.substr(j, len));
                            dp2[i].push_back(p);
                        }
                    }
                }
            }
        }
        return dp2[N-1];
    }
};
<file_sep>/*
Given a binary tree, return the postorder traversal of its nodes' values.
For example:
Given binary tree {1,#,2,3},
1
\
2
/
3
return [3,2,1].
Note: Recursive solution is trivial, could you do it iteratively?
*/
/**
* Definition for binary tree
* struct TreeNode {
* int val;
* TreeNode *left;
* TreeNode *right;
* TreeNode(int x) : val(x), left(NULL), right(NULL) {}
* };
*/
class Solution {
public:
    //O(N) space
    /*
    vector<int> postorderTraversal(TreeNode *root) {
        vector<int> v;
        if(root==NULL)
            return v;
        stack<TreeNode*> s;
        TreeNode * curNode = root;
        unordered_set<TreeNode*> cache;
        while(curNode||!s.empty()){
            if(curNode){
                s.push(curNode);
                curNode=curNode->left;
            }
            else{
                curNode = s.top();
                s.pop();
                if(curNode->right&&cache.count(curNode->right)==0){ //if right is not visited
                    s.push(curNode);
                    curNode = curNode->right;
                }
                else{ //if right is already visited
                    v.push_back(curNode->val);
                    cache.insert(curNode);
                    curNode = NULL;
                }
            }
        }
        return v;
    }
    */
    // Iterative post-order without a visited set: `preNode` remembers
    // the last emitted node, so a node is emitted once its right child
    // is NULL or was just finished.
    vector<int> postorderTraversal(TreeNode *root) {
        vector<int> v;
        if(root==NULL)
            return v;
        stack<TreeNode*> s;
        TreeNode * curNode = root;
        TreeNode* preNode = NULL;   // most recently emitted node
        while(curNode||!s.empty()){
            if(curNode){
                // Descend the left spine.
                s.push(curNode);
                curNode=curNode->left;
            }
            else{
                curNode = s.top();
                s.pop();
                if( curNode->right==NULL||preNode==curNode->right ){ //if right is visited
                    v.push_back(curNode->val);
                    preNode = curNode;
                    curNode = NULL;
                }
                else{
                    // Right subtree pending: re-push and visit it first.
                    s.push(curNode);
                    curNode = curNode->right;
                }
            }
        }
        return v;
    }
};
//-----------------------------
/*
Clone an undirected graph. Each node in the graph contains a label and a list of its neighbors.
OJ's undirected graph serialization:
Nodes are labeled uniquely.
We use # as a separator for each node, and , as a separator for node label and each neighbor of the node.
As an example, consider the serialized graph {0,1,2#1,2#2,2}.
The graph has a total of three nodes, and therefore contains three parts as separated by #.
First node is labeled as 0. Connect node 0 to both nodes 1 and 2.
Second node is labeled as 1. Connect node 1 to node 2.
Third node is labeled as 2. Connect node 2 to node 2 (itself), thus forming a self-cycle.
Visually, the graph looks like the following:
1
/ \
/ \
0 --- 2
/ \
\_/
*/
/**
* Definition for undirected graph.
* struct UndirectedGraphNode {
* int label;
* vector<UndirectedGraphNode *> neighbors;
* UndirectedGraphNode(int x) : label(x) {};
* };
*/
class Solution {
public:
    // Deep-copy an undirected graph via BFS. `cache` maps each original
    // node to its clone and doubles as the visited set, so cycles and
    // self-loops terminate.
    UndirectedGraphNode *cloneGraph(UndirectedGraphNode *node) {
        if(node==NULL){
            return NULL;
        }
        unordered_map<UndirectedGraphNode*, UndirectedGraphNode*> cache;
        cache[node] = new UndirectedGraphNode(node->label);
        queue<UndirectedGraphNode*> q;
        q.push(node);
        while(!q.empty()){
            UndirectedGraphNode* n = q.front();
            q.pop();
            for(int i = 0; i < n->neighbors.size(); ++i ){
                UndirectedGraphNode* nb = n->neighbors[i];
                if( !cache.count(nb) ){
                    // First visit: clone it and schedule its edges.
                    cache[nb] = new UndirectedGraphNode(nb->label);
                    q.push(nb);
                }
                // Wire the cloned edge. (Previously duplicated in both
                // branches of the if/else; hoisted out.)
                (cache[n]->neighbors).push_back(cache[nb]);
            }
        }
        return cache[node];
    }
};
//-----------------------------
/*
Given a singly linked list where elements are sorted in ascending order, convert it to a height balanced BST.
*/
/**
* Definition for singly-linked list.
* struct ListNode {
* int val;
* ListNode *next;
* ListNode(int x) : val(x), next(NULL) {}
* };
*/
/**
* Definition for binary tree
* struct TreeNode {
* int val;
* TreeNode *left;
* TreeNode *right;
* TreeNode(int x) : val(x), left(NULL), right(NULL) {}
* };
*/
class Solution {
public:
    //O(N), not O(NlgN)
    // Build a height-balanced BST from a sorted list by simulating an
    // in-order traversal: count the nodes, then recursively build the
    // left subtree, consume one list node for the root, and build the
    // right subtree. Each list node is visited exactly once.
    TreeNode *sortedListToBST(ListNode *head) {
        if(head==NULL){
            return NULL;
        }
        int len = 0;
        ListNode* cur = head;
        while(cur){
            cur = cur->next;
            ++len;
        }
        TreeNode* root = buildBST(0,len-1,head);
        return root;
    }
    // `head` is passed by reference and advances in in-order sequence,
    // which is why the node is created before the left subtree but
    // assigned its value only after the recursion returns.
    TreeNode* buildBST(int b ,int e, ListNode*& head){
        if(b<=e){
            int m = b + (e-b)/2;
            TreeNode* n = new TreeNode(0);
            n->left = buildBST(b,m-1,head);
            n->val=head->val;     // head now points at the in-order node
            head=head->next;
            n->right = buildBST(m+1,e,head);
            return n;
        }
        return NULL;
    }
};
//-----------------------------
/*
A linked list is given such that each node contains an additional random pointer which could point to any node in the list or null.
Return a deep copy of the list.
*/
/**
* Definition for singly-linked list with a random pointer.
* struct RandomListNode {
* int label;
* RandomListNode *next, *random;
* RandomListNode(int x) : label(x), next(NULL), random(NULL) {}
* };
*/
class Solution {
public:
    // Deep-copy a list whose nodes also carry a `random` pointer.
    RandomListNode *copyRandomList(RandomListNode *head) {
        return copyRandomList2(head);
    }
    //O(1) space, very tricky
    // Interleaving trick: (1) insert each clone right after its
    // original, (2) set each clone's random via original->random->next,
    // (3) unzip the interleaved list back into two lists.
    RandomListNode *copyRandomList2(RandomListNode *head) {
        //stage1
        for (RandomListNode *cur = head; cur; cur = cur->next->next) {
            RandomListNode *newNode = new RandomListNode(cur->label);
            newNode->next = cur->next;
            cur->next = newNode;
        }
        //stage2
        for (RandomListNode *cur = head; cur; cur = cur->next->next)
            if (cur->random)
                cur->next->random = cur->random->next;  // clone of random
        //stage3
        RandomListNode dummy(0), *curNew = &dummy;
        for (RandomListNode *cur = head; cur; cur = cur->next) {
            curNew->next = cur->next;      // detach the clone
            curNew = curNew->next;
            cur->next = cur->next->next;   // restore the original link
        }
        return dummy.next;
    }
    //O(N) space
    // Hash-map version: first clone every node, then wire next/random
    // through the original->clone map (NULL maps to NULL).
    RandomListNode *copyRandomList1(RandomListNode *head) {
        if(!head)
            return NULL;
        unordered_map<RandomListNode* , RandomListNode*> cache;
        cache[NULL]=NULL;
        RandomListNode* cur = head;
        while(cur){
            RandomListNode* copy = new RandomListNode(cur->label);
            cache[cur]=copy;
            cur=cur->next;
        }
        cur = head;
        while(cur){
            RandomListNode* copy = cache[cur];
            copy->next = cache[cur->next];
            copy->random = cache[cur->random];
            cur=cur->next;
        }
        return cache[head];
    }
};
//-----------------------------
/*
Divide two integers without using multiplication, division and mod operator.
*/
class Solution {
public:
    // Integer division without *, /, or %. Repeatedly subtracts the
    // largest left-shifted multiple of the divisor; long long absolute
    // values avoid overflow on INT_MIN. Result truncates toward zero.
    int divide(int dividend, int divisor) {
        assert(divisor != 0);
        // Negative result iff the operands' signs differ.
        bool flag = dividend > 0 && divisor < 0 ||
            dividend < 0 && divisor > 0;
        long long dividendll = abs((long long)dividend);
        long long divisorll = abs((long long)divisor);
        int res = 0;
        while (dividendll >= divisorll)
        {
            // Find the largest divisor * 2^k that still fits.
            long long div = divisorll;
            int quot = 1;
            while ((div << 1) <= dividendll) {
                div <<= 1;
                quot <<= 1;
            }
            dividendll -= div;
            res += quot;
        }
        return flag ? -res : res;
    }
    // Earlier variant of the same shift-and-subtract idea; the sign is
    // derived with XOR on the operands' signs.
    int divide1(int dividend, int divisor) {
        int neg = 1;
        if( (dividend>0) ^ (divisor>0) )
            neg = -1;
        long long dividend1 = abs((long long)dividend);
        long long divisor1 = abs((long long)divisor);
        int ret = 0;
        while(dividend1>=divisor1){
            int j = 0;
            while(dividend1 >= divisor1<<(j+1))
                ++j;
            dividend1 -= divisor1<<j;
            ret += 1<<j;
        }
        return ret*neg;
    }
};
//-----------------------------
/*
Given a set of non-overlapping intervals, insert a new interval into the intervals (merge if necessary).
You may assume that the intervals were initially sorted according to their start times.
Example 1:
Given intervals [1,3],[6,9], insert and merge [2,5] in as [1,5],[6,9].
Example 2:
Given [1,2],[3,5],[6,7],[8,10],[12,16], insert and merge [4,9] in as [1,2],[3,10],[12,16].
This is because the new interval [4,9] overlaps with [3,5],[6,7],[8,10].
*/
/**
* Definition for an interval.
* struct Interval {
* int start;
* int end;
* Interval() : start(0), end(0) {}
* Interval(int s, int e) : start(s), end(e) {}
* };
*/
class Solution {
public:
    // Insert newInterval into a sorted list of disjoint intervals,
    // merging any overlaps. Three phases: copy intervals strictly
    // before it, merge the overlapping run, copy the rest.
    vector<Interval> insert(vector<Interval> &intervals, Interval newInterval) {
        vector<Interval> newV;
        if(intervals.size()==0){
            newV.push_back(newInterval);
            return newV;
        }
        int i = 0;
        bool used = false;   // has newInterval been placed yet?
        for( ; i < intervals.size(); ++i ){
            Interval& interval = intervals[i];
            if(interval.end < newInterval.start){
                // Entirely before the new interval.
                newV.push_back(interval);
            }
            else if(interval.start > newInterval.end){
                // Entirely after: emit newInterval and stop scanning.
                used = true;
                newV.push_back(newInterval);
                break;
            }
            else{
                // Overlap: grow the merged bounds while successive
                // intervals keep touching the merged end.
                int newStart = min(interval.start, newInterval.start);
                int newEnd = max(interval.end, newInterval.end);
                int j = i+1;
                while( j < intervals.size() ){
                    Interval& interval = intervals[j];
                    if(newEnd>=interval.start){
                        ++j;
                        newEnd = max(interval.end, newEnd);
                    }
                    else{
                        break;
                    }
                }
                used = true;
                newV.push_back(Interval(newStart,newEnd));
                i=j;   // resume copying after the merged run
                break;
            }
        }
        // Copy any untouched tail intervals.
        if(i<intervals.size()){
            copy(intervals.begin()+i,intervals.end(),back_inserter(newV));
        }
        // newInterval comes after everything: append it.
        if( used != true){
            newV.push_back(newInterval);
        }
        return newV;
    }
};
//-----------------------------
/*
Given s1, s2, s3, find whether s3 is formed by the interleaving of s1 and s2.
For example,
Given:
s1 = "aabcc",
s2 = "dbbca",
When s3 = "aadbbcbcac", return true.
When s3 = "aadbbbaccc", return false.
*/
class Solution {
public:
    // Memo of (s1, s2, s3) triples already proven impossible; turns the
    // exponential recursion into polynomial time across repeated subproblems.
    unordered_set<string> bad;
    // True iff s3 can be formed by interleaving s1 and s2 (top-down DP).
    bool isInterleave(string s1, string s2, string s3) {
        static string BAD("#_#"); // separator used to build the memo key
        if (s1.size() + s2.size() != s3.size())
            return false;
        if (s1.empty())
            return s2 == s3;
        if (s2.empty())
            return s1 == s3;
        string key = s1 + BAD + s2 + BAD + s3;
        if (bad.count(key))
            return false;
        bool ok = (s1[0] == s3[0] && isInterleave(s1.substr(1), s2, s3.substr(1)))
               || (s2[0] == s3[0] && isInterleave(s1, s2.substr(1), s3.substr(1)));
        if (!ok)
            bad.insert(key); // remember the dead end
        return ok;
    }
    // Bottom-up DP: dp[i][j] == true iff s3[0..i+j) is an interleaving of
    // s1[0..i) and s2[0..j).
    bool isInterleave2(string & s1, string & s2, string & s3) {
        int M = s1.size(), N = s2.size();
        if ((int)s3.size() != M + N)
            return false;
        vector<vector<bool> > dp(M + 1, vector<bool>(N + 1, false));
        dp[0][0] = true;
        for (int j = 1; j <= N; j++)
            dp[0][j] = dp[0][j - 1] && (s2[j - 1] == s3[j - 1]);
        for (int i = 1; i <= M; i++)
            dp[i][0] = dp[i - 1][0] && (s1[i - 1] == s3[i - 1]);
        for (int i = 1; i <= M; i++)
            for (int j = 1; j <= N; j++)
                dp[i][j] = (dp[i][j - 1] && s2[j - 1] == s3[i + j - 1])
                        || (dp[i - 1][j] && s1[i - 1] == s3[i + j - 1]);
        return dp[M][N];
    }
};
//-----------------------------
/*
Given a string containing just the characters '(' and ')', find the length of the longest valid (well-formed) parentheses substring.
For "(()", the longest valid parentheses substring is "()", which has length = 2.
Another example is ")()())", where the longest valid parentheses substring is "()()", which has length = 4
*/
class Solution {
public:
    // Length of the longest well-formed "()" substring of s.
    int longestValidParentheses(string s) {
        return longestValidParentheses2(s);
    }
    // Stack-based variant: indices of unmatched '(' live on the stack;
    // `last` remembers the most recent ')' that could not be matched.
    int longestValidParentheses1(string & s) {
        stack<int> open;
        int best = 0;
        int last = -1; // index of the latest unmatched ')'
        for (int i = 0; i < (int)s.size(); ++i) {
            if (s[i] == '(') {
                open.push(i);
                continue;
            }
            if (open.empty()) {
                last = i; // valid runs can only start after this position
            } else {
                open.pop();
                int len = open.empty() ? i - last : i - open.top();
                if (len > best)
                    best = len;
            }
        }
        return best;
    }
    // Two counting sweeps, O(1) space: left-to-right handles strings like
    // "(((()", right-to-left handles strings like "())".
    int longestValidParentheses2(string & s) {
        int n = s.size();
        int forward = longestValidParenthesesHelper(s, '(', 0, n, 1);
        int backward = longestValidParenthesesHelper(s, ')', n - 1, -1, -1);
        return forward > backward ? forward : backward;
    }
    // One directional sweep counting opens of `c` against closes.
    int longestValidParenthesesHelper(string & s, char c, int start, int end, int step) {
        int best = 0, open = 0, matched = 0;
        for (int i = start; i != end; i += step) {
            if (s[i] == c)
                ++open;
            else {
                --open;
                ++matched;
            }
            if (open == 0)
                best = max(best, 2 * matched); // everything since the reset is valid
            else if (open < 0)
                open = matched = 0; // too many closers: restart
        }
        return best;
    }
};
//-----------------------------
/*
Given n points on a 2D plane, find the maximum number of points that lie on the same straight line.
*/
/**
* Definition for a point.
* struct Point {
* int x;
* int y;
* Point() : x(0), y(0) {}
* Point(int a, int b) : x(a), y(b) {}
* };
*/
class Solution {
public:
    // Maximum number of points that lie on one straight line.
    // For each anchor point, bucket every other point by its slope toward the
    // anchor; coincident points are counted separately and added to every
    // bucket. NOTE(review): hashing a double slope can misbucket lines whose
    // slopes differ only by floating-point rounding — fine for small integer
    // coordinates, not exact in general.
    int maxPoints(vector<Point> &points) {
        if(points.size()<=2)
            return points.size();
        int maxPoints = 2;
        for(int i = 0; i < points.size()-1; ++i){
            Point& src = points[i];
            //slope <-> points except src
            unordered_map<double, int> cache;
            cache.clear();
            //number of points same as src (counts src itself)
            int duplicates = 1;
            for(int j = i + 1 ; j < points.size(); ++j){
                Point& dest = points[j];
                if(dest.x==src.x&&dest.y==src.y){
                    ++duplicates;
                    continue;
                }
                double slope = 0.0;
                if(src.x==dest.x)
                    slope = (double)INT_MAX; // sentinel slope for a vertical line
                else
                    slope = (double)(dest.y-src.y)/(double)(dest.x-src.x);
                if(cache.count(slope)==0)
                    cache[slope]=1;
                else
                    cache[slope]++;
            }
            //update local max: biggest slope bucket through src
            int count = 0;
            unordered_map<double,int >::iterator iter = cache.begin();
            while(iter != cache.end()){
                count = max(iter->second,count);
                ++iter;
            }
            //update global max (duplicates lie on every line through src)
            maxPoints = max(maxPoints, count+duplicates );
        }
        return maxPoints;
    }
};
//-----------------------------
/*
Given a string S and a string T, find the minimum window in S which will contain all the characters in T in complexity O(n).
For example,
S = "ADOBECODEBANC"
T = "ABC"
Minimum window is "BANC".
Note:
If there is no such window in S that covers all characters in T, return the empty string "".
If there are multiple such windows, you are guaranteed that there will always be only one unique minimum window in S.
*/
class Solution {
public:
    // Smallest substring of S containing every character of T (with
    // multiplicity); returns "" when no such window exists. O(|S| + |T|).
    string minWindow(string S, string T) {
        int need[256] = {0}; // required count per character
        int have[256] = {0}; // count gathered inside the current window
        for (size_t i = 0; i < T.size(); i++)
            need[(int)T[i]]++;
        size_t matched = 0; // characters of T matched so far (capped by need)
        int bestLen = INT_MAX;
        string best = "";
        size_t left = 0;
        for (size_t right = 0; right < S.size(); right++) {
            int c = (int)S[right];
            if (need[c] == 0)
                continue; // character irrelevant to T
            have[c]++;
            if (have[c] <= need[c])
                matched++;
            if (matched != T.size())
                continue;
            // window covers T: shrink from the left while it stays valid
            while (need[(int)S[left]] == 0 || have[(int)S[left]] > need[(int)S[left]]) {
                if (have[(int)S[left]] > need[(int)S[left]])
                    have[(int)S[left]]--;
                left++;
            }
            int len = (int)(right - left + 1);
            if (len < bestLen) {
                bestLen = len;
                best = S.substr(left, len);
            }
        }
        return best;
    }
};
//-----------------------------
/*
Given a string s, partition s such that every substring of the partition is a palindrome.
Return all possible palindrome partitioning of s.
For example, given s = "aab",
Return
[
["aa","b"],
["a","a","b"]
]
*/
//O(N^3) with cache, otherwise O(2^N)
class Solution {
public:
    // Return every way to split s into palindromic substrings.
    vector<vector<string>> partition(string s) {
        //return partition1(s);
        return partition2(s);
    }
    //Top-down DP
    vector<vector<string>> vv; // results accumulated by partition1
    vector<vector<string>> partition1(string s) {
        if(s=="")
            return vv;
        vector<string> v;
        partitionHelper(v,s);
        return vv;
    }
    // DFS over palindromic prefixes of s.
    // Fix: `v` is now passed by reference — the original copied the whole
    // prefix vector on every recursive call, which made the push_back/pop_back
    // backtracking pair pointless and added O(depth) vector copies per call.
    // Results are unchanged.
    void partitionHelper(vector<string>& v, string s){
        if(s==""){
            vv.push_back(v); // a complete palindromic partition
            return ;
        }
        for(int i = 1; i <= (int)s.size(); ++i){
            string prefix = s.substr(0,i);
            string suffix = s.substr(i);
            if(isPalindrome(prefix)){
                v.push_back(prefix);
                partitionHelper(v,suffix);
                v.pop_back(); // backtrack before trying a longer prefix
            }
        }
    }
    // Memoized palindrome test; the cache persists across calls.
    bool isPalindrome(string s){
        static unordered_map<string,bool> cache; // cache palindrome substrings
        if(s=="")
            return true;
        if(cache.count(s)>0)
            return cache[s];
        for(int i = 0 , j = s.size()-1; i<=j; ++i,--j ){
            if(s[i]!=s[j]){
                cache[s]=false;
                return false;
            }
        }
        cache[s]=true;
        return true;
    }
    //Bottom-up DP
    // dp1[j][i] caches whether s[j..i] is a palindrome; dp2[i] holds every
    // palindromic partition of the prefix ending at index i.
    vector<vector<string>> partition2(string s) {
        vector<vector<string>> vv;
        if(s=="")
            return vv;
        int N=s.size();
        vector<vector<bool> > dp1(N,vector<bool>(N,false) );//cache isPalindrome
        vector<vector<vector<string> > > dp2(N, vector<vector<string> >());//cache intermediate results
        for(int i = 0; i < N; ++i){
            for(int j = i ;j >= 0 ; --j){
                // s[j..i] is a palindrome iff the ends match and the inside is one
                if(s[i]==s[j]&&(i-j<2||dp1[j+1][i-1])){
                    dp1[j][i]=true;
                    int len = i-j+1;
                    if(j==0){
                        // the whole prefix is a single palindrome
                        dp2[i].push_back(vector<string>(1, s.substr(0, len)));
                    }
                    else{
                        // extend every partition of s[0..j) with s[j..i]
                        for (int k = 0; k < (int)dp2[j-1].size(); ++k ) {
                            vector<string> p = dp2[j-1][k];
                            p.push_back(s.substr(j, len));
                            dp2[i].push_back(p);
                        }
                    }
                }
            }
        }
        return dp2[N-1];
    }
};
//-----------------------------
/*
Given a string s, partition s such that every substring of the partition is a palindrome.
Return the minimum cuts needed for a palindrome partitioning of s.
For example, given s = "aab",
Return 1 since the palindrome partitioning ["aa","b"] could be produced using 1 cut.
*/
class Solution {
public:
    // Minimum number of cuts so that every piece of s is a palindrome.
    // Bottom-up O(N^2): isPal[j][i] caches whether s[j..i] is a palindrome,
    // cuts[i] is the best answer for the prefix ending at index i.
    int minCut(string s) {
        int n = s.size();
        if (n < 2)
            return 0;
        vector<vector<bool> > isPal(n, vector<bool>(n, false));
        vector<int> cuts(n, INT_MAX);
        for (int i = 0; i < n; ++i) {
            for (int j = i; j >= 0; --j) {
                // s[j..i] is a palindrome iff its ends match and the inside is one
                if (s[j] != s[i] || (i - j >= 2 && !isPal[j + 1][i - 1]))
                    continue;
                isPal[j][i] = true;
                // either the whole prefix is a palindrome (no cut), or we cut
                // just before j and reuse the best answer for s[0..j)
                cuts[i] = (j == 0) ? 0 : std::min(cuts[i], cuts[j - 1] + 1);
            }
        }
        return cuts[n - 1];
    }
};
//-----------------------------
/*
The set [1,2,3,…,n] contains a total of n! unique permutations.
By listing and labeling all of the permutations in order,
We get the following sequence (ie, for n = 3):
"123"
"132"
"213"
"231"
"312"
"321"
Given n and k, return the kth permutation sequence.
Note: Given n will be between 1 and 9 inclusive.
*/
class Solution {
public:
    // Return the k-th (1-based) smallest digit in [1..n] that is not yet
    // used, and mark it used. Returns 0 if fewer than k digits remain.
    int findkthSmall(vector<bool>& used, int k){
        int count = 0;
        for(int i = 0; i < used.size(); ++i){
            if(!used[i])
                ++count;
            if(count == k){
                used[i]=true;
                return i+1;
            }
        }
        return 0;
    }
    //use math method, very clever
    // Build the k-th permutation of "12...n" digit by digit: the leading
    // digit is fixed by k / (n-1)!, the next by the remainder / (n-2)!, etc.
    // After the first loop, v[i-1] holds i!.
    string getPermutation(int n, int k) {
        vector<int> v(n,1);
        for(int i = 2; i <= n ; ++i){
            v[i-1]=i*v[i-2]; // i! = i * (i-1)!
        }
        assert(k<=v[n-1]); // k must not exceed n!
        vector<bool> used(n,false);
        stringstream ss;
        for(int i = 0; i < v.size()-1 ; ++i){
            // how many whole blocks of (n-1-i)! permutations precede k
            int rank = (k-1) / v[v.size()-i-2];
            k -= rank*v[v.size()-i-2];
            int digit = findkthSmall(used,rank+1);
            ss << digit;
        }
        int digit = findkthSmall(used,1); // only one digit remains
        ss << digit;
        return ss.str();
        //solution based on next_permutation
        /*
        vector<int> v(n,0);
        for(int i = 1; i <= n ; ++i){
            v[i-1]=i;
        }
        int i = 0;
        while( i++ != k){
            nextPermutation(v);
        }
        stringstream ss;
        for(int i = 1; i <= n ; ++i){
            ss << v[i-1];
        }
        return ss.str();
        */
    }
    // Rearrange num into the lexicographically next permutation, in place;
    // wraps around to the smallest permutation when num is already largest.
    void nextPermutation(vector<int> &num) {
        if(num.size()==0||num.size()==1)
            return;
        int i = num.size()-2;
        for( ; i>=0; --i){
            if(num[i]<num[i+1]) // rightmost position that can be increased
                break;
        }
        //rollback: whole array is non-increasing, wrap to smallest
        if(i==-1){
            reverse(num.begin(),num.end());
            return;
        }
        int j = i+1;
        while(j<num.size()){
            if(num[j]<=num[i]) // num[j-1] is the smallest element > num[i] to its right
                break;
            ++j;
        }
        swap(num[i],num[j-1]);
        reverse(num.begin()+i+1, num.end() ); // suffix was descending; make it ascending
    }
};
//-----------------------------
/*
Implement pow(x, n).
*/
class Solution {
public:
    // Compute x^n by repeated squaring, O(log |n|).
    // Fix: the exponent is widened to long long before negation — the
    // original evaluated -n (via `pow(x, -n)`) which overflows int and is
    // undefined behavior when n == INT_MIN.
    double pow(double x, int n) {
        if (x == 0 || x == 1) return x; // preserves pow(0, n<0) == 0 from the original
        return powHelper(x, (long long)n);
    }
private:
    // x^e for any 64-bit exponent. Negative x is handled naturally by the
    // odd/even split, so no separate sign branch is needed.
    double powHelper(double x, long long e) {
        if (e < 0) return 1.0 / powHelper(x, -e); // safe: e is 64-bit
        if (e == 0) return 1.0;
        double half = powHelper(x, e / 2);
        return (e % 2 == 0) ? half * half : x * half * half;
    }
};
//-----------------------------
/*
Implement regular expression matching with support for '.' and '*'.
'.' Matches any single character.
'*' Matches zero or more of the preceding element.
The matching should cover the entire input string (not partial).
The function prototype should be:
bool isMatch(const char *s, const char *p)
Some examples:
isMatch("aa","a") → false
isMatch("aa","aa") → true
isMatch("aaa","aa") → false
isMatch("aa", "a*") → true
isMatch("aa", ".*") → true
isMatch("ab", ".*") → true
isMatch("aab", "c*a*b") → true
*/
class Solution {
public:
    // Regex match with '.' (any single char) and '*' (zero or more of the
    // preceding element); the pattern must cover all of s.
    bool isMatch(const char *s, const char *p) {
        assert(s && p);
        if (*p == '\0') return *s == '\0';
        if (*(p+1) != '*') {
            assert(*p != '*');
            return ((*p == *s) || (*p == '.' && *s != '\0')) && isMatch(s+1, p+1);
        }
        // next char is '*': try letting it absorb 0, 1, 2, ... occurrences of *p
        while ((*p == *s) || (*p == '.' && *s != '\0')) {
            if (isMatch(s, p+2)) return true;
            s++;
        }
        return isMatch(s, p+2);
    }
    // std::string wrapper around the recursive matcher.
    // Fix: the original fell off the end of this function without returning
    // a value (undefined behavior); the recursive result is now returned.
    bool isMatch1(const char *s, const char *p) {
        string str(s);
        string pattern(p);
        return isMatch1(str,pattern);
    }
    // Same algorithm on std::string. Note: p[1] at p.size() is the
    // guaranteed null terminator (C++11 operator[]), so the read is safe.
    bool isMatch1(string s, string p){
        if( p=="" )
            return s=="";
        if(p[1]!='*'){
            if(s[0]==p[0]|| (p[0]=='.' && s!="") )
                return isMatch1(s.substr(1),p.substr(1));
            return false;
        }
        else{
            // '*' matches zero copies ...
            if( isMatch1(s,p.substr(2)) )
                return true;
            // ... or one more copy for each leading char it can absorb
            for(int i = 0; i<s.size(); ++i){
                if(s[i]==p[0]|| p[0]=='.'){
                    if(isMatch1(s.substr(i+1),p.substr(2)))
                        return true;
                }
                else{
                    break;
                }
            }
            return false;
        }
    }
};
//-----------------------------
/*
Given a string containing only digits, restore it by returning all possible valid IP address combinations.
For example:
Given "25525511135",
return ["255.255.11.135", "255.255.111.35"]. (Order does not matter)
*/
class Solution {
public:
    // All valid dotted-quad IP addresses that can be formed from the digit
    // string s (order follows the backtracking, shortest octets first).
    vector<string> restoreIpAddresses(string s) {
        vector<string> result;
        if (s.size() < 4 || s.size() > 12)
            return result; // an IPv4 address uses between 4 and 12 digits
        vector<string> parts;
        restoreIpAddresses(result, s, parts);
        return result;
    }
    // Backtracking: `cur` holds the octets chosen so far, `s` the digits left.
    void restoreIpAddresses(vector<string>& v, string s, vector<string>& cur){
        if (cur.size() == 4) {
            if (s == "") { // all digits consumed: emit the address
                string addr = cur[0];
                for (int i = 1; i < 4; ++i)
                    addr += "." + cur[i];
                v.push_back(addr);
            }
            return;
        }
        int maxLen = (int)s.size() < 3 ? (int)s.size() : 3;
        for (int len = 1; len <= maxLen; ++len) {
            string octet = s.substr(0, len);
            if (!isValid(octet))
                continue;
            cur.push_back(octet);
            restoreIpAddresses(v, s.substr(len), cur);
            cur.pop_back(); // backtrack
        }
    }
    // An octet is valid when it reads as 0..255 with no leading zero.
    bool isValid(string& s){
        switch (s.size()) {
            case 1:
                return s[0] >= '0' && s[0] <= '9';
            case 2:
                return atoi(s.c_str()) >= 10; // >= 10 also rejects a leading zero
            case 3: {
                int value = atoi(s.c_str());
                return value >= 100 && value <= 255;
            }
            default:
                return false;
        }
    }
};
//-----------------------------
/*
You are given an n x n 2D matrix representing an image.
Rotate the image by 90 degrees (clockwise).
Follow up:
Could you do this in-place?
*/
class Solution {
public:
    // Rotate the N x N matrix 90 degrees clockwise, in place.
    // Processes one concentric ring at a time, cycling four cells per step:
    // top <- left <- bottom <- right <- top.
    void rotate(vector<vector<int> > &matrix) {
        int n = matrix.size();
        if (n < 2) return; // empty or 1x1: nothing to move
        for (int layer = 0; layer < n / 2; ++layer) {
            int lo = layer;
            int hi = n - 1 - layer;
            for (int k = lo; k < hi; ++k) {
                int off = k - lo;
                int saved = matrix[lo][k];                   // top cell
                matrix[lo][k]        = matrix[hi - off][lo]; // left  -> top
                matrix[hi - off][lo] = matrix[hi][hi - off]; // bottom -> left
                matrix[hi][hi - off] = matrix[k][hi];        // right -> bottom
                matrix[k][hi]        = saved;                // top   -> right
            }
        }
    }
};
//-----------------------------
/*
Given a binary tree containing digits from 0-9 only, each root-to-leaf path could represent a number.
An example is the root-to-leaf path 1->2->3 which represents the number 123.
Find the total sum of all root-to-leaf numbers.
For example,
1
/ \
2 3
The root-to-leaf path 1->2 represents the number 12.
The root-to-leaf path 1->3 represents the number 13.
Return the sum = 12 + 13 = 25.
*/
/**
* Definition for binary tree
* struct TreeNode {
* int val;
* TreeNode *left;
* TreeNode *right;
* TreeNode(int x) : val(x), left(NULL), right(NULL) {}
* };
*/
/**
* Definition for binary tree
* struct TreeNode {
* int val;
* TreeNode *left;
* TreeNode *right;
* TreeNode(int x) : val(x), left(NULL), right(NULL) {}
* };
*/
class Solution {
public:
int sumNumbers(TreeNode *root) {
return sumNumbers_1(root);
}
//recursive
int sumNumbers_1(TreeNode *root) {
int sum = 0;
sumNumbersRe(root, 0, sum);
return sum;
}
void sumNumbersRe(TreeNode *node, int num, int &sum) {
if (!node) return;
num = num * 10 + node->val;
if (!node->left && !node->right) {
sum += num;
return;
}
sumNumbersRe(node->left, num, sum);
sumNumbersRe(node->right, num, sum);
}
//iterative
int sumNumbers_2(TreeNode *root) {
if (!root) return 0;
int res = 0;
queue<pair<TreeNode *, int>> q;
q.push(make_pair(root, 0));
while(!q.empty())
{
TreeNode *node = q.front().first;
int sum = q.front().second * 10 + node->val;
q.pop();
if (!node->left && !node->right)
{
res += sum;
continue;
}
if (node->left)
q.push(make_pair(node->left, sum));
if (node->right)
q.push(make_pair(node->right, sum));
}
return res;
}
};
//-----------------------------
/*
Validate if a given string is numeric.
Some examples:
"0" => true
" 0.1 " => true
"abc" => false
"1 a" => false
"2e10" => true
Note: It is intended for the problem statement to be ambiguous. You should gather all requirements up front before implementing one.
*/
class Solution {
public:
    // Validate a numeric string: optional surrounding spaces, optional sign,
    // digits with at most one '.', and an optional exponent 'e' followed by
    // an optionally-signed run of digits.
    bool isNumber(const char* s) {
        // trim leading/trailing spaces
        while (*s != '\0' && isspace(*s)) s++;
        if (*s == '\0') return false;
        const char *e = s + strlen(s) - 1;
        while (e > s && isspace(*e)) e--;
        // skip leading +/-
        if (*s == '+' || *s == '-') s++;
        bool num = false; // is a digit (seen since the start or since 'e')
        bool dot = false; // is a '.'
        bool exp = false; // is a 'e'
        while (s != e + 1) {
            if (*s >= '0' && *s <= '9') {
                num = true;
            }
            else if (*s == '.') {
                if(exp || dot) return false; // no '.' in the exponent, no second '.'
                dot = true;
            }
            else if (*s == 'e') {
                if(exp || num == false) return false; // need digits before a single 'e'
                exp = true;
                num = false; // the exponent must supply its own digits
            }
            else if (*s == '+' || *s == '-') {
                if (*(s-1) != 'e') return false; // a sign is only legal right after 'e'
            }
            else {
                return false;
            }
            s++;
        }
        return num; // the string must end on digits
    }
    //state machine
    // Table-driven equivalent: rows are states, columns are input classes.
    // States 3, 6, 7, 8 are accepting (see validStates at the bottom).
    bool isNumber1(const char *s) {
        if(*s==0) return false;
        enum Action {INVALID=0, SPACE=1, SIGN=2, DIGIT=3, DOT=4, EXPONENT=5};
        const int N = 6;
        //[state][action]
        int transTable[][N] =
        { /* 0 1 2 3 4 5 */
            0, 1, 2, 3, 4, 0, // 0: INVALID
            0, 1, 2, 3, 4, 0, // 1: SPACE
            0, 0, 0, 3, 4, 0, // 2: SIGN
            0, 6, 0, 3, 7, 5, // 3: DIGIT
            0, 0, 0, 7, 0, 0, // 4: DOT
            0, 0, 2, 8, 0, 0, // 5: EXPONENT
            0, 6, 0, 0, 0, 0, // 6: END WITH SPACE
            0, 6, 0, 7, 0, 5, // 7: DOT AND DIGIT
            0, 6, 0, 8, 0, 0, // 8: END WITH SPACE OR DIGIT
        };
        int state = 0;
        while(*s){
            // classify the current character
            Action act = INVALID;
            if (*s == ' ')
                act = SPACE;
            else if (*s == '+' || *s == '-')
                act = SIGN;
            else if (isdigit(*s))
                act = DIGIT;
            else if (*s == '.')
                act = DOT;
            else if (*s == 'e')
                act = EXPONENT;
            state = transTable[state][act];
            if (state == 0) return false; // dead state: reject early
            s++;
        }
        bool validStates[]={0,0,0,1,0,0,1,1,1};
        return validStates[state];
    }
};
//-----------------------------
/*
Implement wildcard pattern matching with support for '?' and '*'.
'?' Matches any single character.
'*' Matches any sequence of characters (including the empty sequence).
The matching should cover the entire input string (not partial).
The function prototype should be:
bool isMatch(const char *s, const char *p)
Some examples:
isMatch("aa","a") → false
isMatch("aa","aa") → true
isMatch("aaa","aa") → false
isMatch("aa", "*") → true
isMatch("aa", "a*") → true
isMatch("ab", "?*") → true
isMatch("aab", "c*a*b") → false
*/
class Solution {
public:
    // Wildcard match: '?' matches any single character, '*' matches any
    // (possibly empty) sequence; the pattern must cover all of s.
    //
    // Greedy two-pointer with backtracking: remember the most recent '*' in
    // p and the position in s it was matched against. On a later mismatch,
    // jump back and let that '*' absorb one more character of s. Worst case
    // O(|s| * |p|) time, O(1) space.
    bool isMatch(const char *s, const char *p) {
        const char* lastStar = NULL; // most recent '*' seen in p
        const char* retry = s;       // where s resumes when we backtrack
        while (*s != '\0') {
            if (*p == '?' || *p == *s) {
                // direct match: advance both
                ++s;
                ++p;
            }
            else if (*p == '*') {
                // tentatively let '*' match the empty sequence
                lastStar = p;
                ++p;
                retry = s;
            }
            else if (lastStar != NULL) {
                // mismatch after a '*': let it swallow one more character
                p = lastStar + 1;
                ++retry;
                s = retry;
            }
            else {
                return false; // mismatch with no '*' to fall back on
            }
        }
        // only trailing stars may remain; they match the empty tail
        while (*p == '*')
            ++p;
        return *p == '\0';
    }
};
//-----------------------------
/*
Given a string s and a dictionary of words dict, determine if s can be segmented into a space-separated sequence of one or more dictionary words.
For example, given
s = "leetcode",
dict = ["leet", "code"].
Return true because "leetcode" can be segmented as "leet code".
*/
class Solution {
public:
    // Suffixes of s already proven unsegmentable (memo for the recursion).
    unordered_set<string> bad;
    //DP top-down O(N^2)
    // True iff s can be split into a sequence of words from dict.
    bool wordBreak(string s, unordered_set<string> &dict) {
        if (s.empty())
            return true;
        if (dict.count(s) > 0)
            return true;  // the whole remainder is one word
        if (bad.count(s) > 0)
            return false; // known dead end
        for (size_t cut = 1; cut <= s.size(); ++cut) {
            if (dict.count(s.substr(0, cut)) == 0)
                continue; // prefix is not a word; try a longer one
            if (wordBreak(s.substr(cut), dict))
                return true;
        }
        bad.insert(s); // every split failed: memoize the failure
        return false;
    }
};
//-----------------------------
/*
Given a string s and a dictionary of words dict, add spaces in s to construct a sentence where each word is a valid dictionary word.
Return all such possible sentences.
For example, given
s = "catsanddog",
dict = ["cat", "cats", "and", "sand", "dog"].
A solution is ["cats and dog", "cat sand dog"].
*/
class Solution {
public:
    // All sentences formed by segmenting s into dictionary words.
    //
    // Fix: the original accidentally declared `class Solution` nested inside
    // another `class Solution`. A member may not have the same name as its
    // enclosing class, so the code was ill-formed C++ and the methods were
    // unreachable through the outer class. The duplicate wrapper is removed.
    //
    // dp[i] lists the lengths of dictionary words that can end a valid
    // segmentation of the prefix s[0..i).
    vector<string> wordBreak(string s, unordered_set<string> &dict) {
        if(s==""||dict.size()==0)
            return vector<string>();
        vector<vector<int> > dp(s.size()+1,vector<int>());
        dp[0].push_back(0); // the empty prefix is trivially segmentable
        for(int i = 1; i <= (int)s.size(); ++i){ // prefix length
            for(int j = i; j >= 1 ; --j){        // candidate final-word length
                if( dict.count(s.substr(i-j,j))>0 && dp[i-j].size()>0 ){
                    dp[i].push_back(j); // record the word length
                }
            }
        }
        return buildVec(s,s.size(),dp);
    }
    // Reconstruct every sentence for the prefix of `str` of length `len`
    // by walking the recorded word lengths backwards.
    vector<string> buildVec(const string& str, int len, const vector<vector<int> >& dp){
        vector<string> v;
        for(int i = 0; i < (int)dp[len].size(); ++i){
            int wordLen = dp[len][i];
            if(wordLen==len){
                v.push_back(str); // the whole prefix is a single word
            }
            else{
                vector<string> pres = buildVec(str.substr(0,len-wordLen),len-wordLen,dp);
                for(int j = 0 ; j < (int)pres.size(); ++j){
                    v.push_back(pres[j]+" " + str.substr(len-wordLen) );
                }
            }
        }
        return v;
    }
};
//-----------------------------
/*
Given two words (start and end), and a dictionary, find the length of shortest transformation sequence from start to end, such that:
Only one letter can be changed at a time
Each intermediate word must exist in the dictionary
For example,
Given:
start = "hit"
end = "cog"
dict = ["hot","dot","dog","lot","log"]
As one shortest transformation is "hit" -> "hot" -> "dot" -> "dog" -> "cog",
return its length 5.
Note:
Return 0 if there is no such transformation sequence.
All words have the same length.
All words contain only lowercase alphabetic characters.
*/
class Solution {
public:
    // Words in dict exactly one letter away from s. NOTE: each neighbor is
    // erased from dict as it is found — the dictionary itself serves as the
    // BFS "unvisited" set, so a word is never expanded twice.
    vector<string > getNeighbors(const string& s, unordered_set<string> &dict){
        vector<string > neighbors;
        string start(s);
        for(int i = 0; i < start.size(); ++i){
            for(int j = 'a' ; j <= 'z' ; ++j){
                start[i] = j;
                if( dict.count(start) > 0){
                    neighbors.push_back(start);
                    dict.erase(start);
                }
            }
            start = s; // restore before mutating the next position
        }
        return neighbors;
    }
    //BFS
    // Length of the shortest transformation sequence from start to end,
    // counting both endpoints. Returns 0 when unreachable.
    // NOTE(review): `end` can only be discovered if it is present in dict —
    // confirm the caller/judge includes the end word in the dictionary.
    int ladderLength(string start, string end, unordered_set<string> &dict) {
        queue<pair<string,int> > q; // (word, ladder length so far)
        q.push(pair<string,int>(start,1));
        dict.erase(start);
        while(q.empty()==false){
            pair<string,int> pair = q.front();
            q.pop();
            if(pair.first==end){
                return pair.second;
            }
            else{
                vector<string > neighbors = getNeighbors( pair.first, dict);
                for(int i = 0; i < neighbors.size(); ++i){
                    q.push( std::pair<string,int>(neighbors[i],pair.second+1) );
                }
            }
        }
        //not found
        return 0;
    }
    //used a different hash to track nodes which were visited before
    /*
    class Solution {
    public:
        int ladderLength(string start, string end, unordered_set<string> &dict) {
            if(start==end)
                return 0;
            typedef pair<string,int> Pair;
            unordered_set<string> used;
            queue<Pair> q;
            q.push(make_pair(start,1));
            used.insert(start);
            while(!q.empty()){
                Pair p = q.front();
                q.pop();
                if(p.first==end) return p.second;
                vector<string> neighbors = getNeighbors(p.first,dict);
                for(int i = 0; i < neighbors.size(); ++i){
                    if(used.count(neighbors[i])==0){
                        used.insert(neighbors[i]);
                        q.push(pair<string,int>(neighbors[i],p.second+1));
                    }
                }
            }
            return 0;
        }
        vector<string > getNeighbors(const string& s, unordered_set<string> &dict){
            vector<string > neighbors;
            string start(s);
            for(int i = 0; i < start.size(); ++i){
                for(int j = 'a' ; j <= 'z' ; ++j){
                    start[i] = j;
                    if( dict.count(start) > 0){
                        neighbors.push_back(start);
                    }
                }
                start = s;
            }
            return neighbors;
        }
    };
    */
};
//-----------------------------
/*
Given two words (start and end), and a dictionary, find all shortest transformation sequence(s) from start to end, such that:
Only one letter can be changed at a time
Each intermediate word must exist in the dictionary
For example,
Given:
start = "hit"
end = "cog"
dict = ["hot","dot","dog","lot","log"]
Return
[
["hit","hot","dot","dog","cog"],
["hit","hot","lot","log","cog"]
]
Note:
All words have the same length.
All words contain only lowercase alphabetic characters.
*/
//BFS to generate the graph then DFS to find paths
class Solution {
public:
    // Words in dict that differ from s by exactly one letter (s itself
    // excluded). Non-destructive: dict is only read here.
    vector<string > getNeighbors(const string& s, unordered_set<string> &dict){
        vector<string > neighbors;
        string start(s);
        for(int i = 0; i < start.size(); ++i){
            for(int j = 'a' ; j <= 'z' ; ++j){
                start[i] = j;
                if(start!=s && dict.count(start) > 0){
                    neighbors.push_back(start);
                }
            }
            start = s; // restore before mutating the next position
        }
        return neighbors;
    }
    vector<vector<string> > res; // all shortest ladders, filled by getPath
    // BFS level by level to build a DAG of shortest-path edges, then DFS
    // that DAG to enumerate every shortest ladder from start to end.
    vector<vector<string>> findLadders(string start, string end, unordered_set<string> &dict) {
        unordered_map<string, vector<string> > graph;//build graph from start to end
        unordered_set<string> visited; //track visited
        unordered_set<string> curr, prev; // bfs levels
        prev.insert(start);
        visited.insert(start);
        //BFS to build graph
        while(!prev.empty()){
            //mark prev visited (a word never re-appears in a later level)
            for(unordered_set<string>::iterator iter = prev.begin(); iter != prev.end(); ++iter){
                visited.insert(*iter);
            }
            //get curr level
            for(unordered_set<string>::iterator iter = prev.begin(); iter != prev.end(); ++iter){
                const string& preStr = *iter;
                vector<string> neighbors = getNeighbors(preStr,dict);
                for(int i = 0; i < neighbors.size(); ++i){
                    string& curStr = neighbors[i];
                    if(visited.count(curStr)==0){ // not visited before
                        curr.insert(curStr);
                        graph[preStr].push_back(curStr); // shortest-path edge
                        //visited.insert(curStr);//Don't mark visited here, otherwise would block other paths
                    }
                }
            }
            if(curr.size()==0) return res; //not found
            if(curr.count(end)>0) break; //found: stop at the shortest level
            prev = curr;
            curr.clear();
        }
        //DFS to find all paths
        vector<string> path;
        getPath(start, end, graph, path);
        return res;
    }
    // DFS over the BFS DAG, appending each complete start->end path to res.
    // `path` is shared and restored via pop_back (backtracking).
    void getPath(string& start, string& end, unordered_map<string,vector<string> >& graph, vector<string> & path) {
        path.push_back(start);
        if (start == end) {
            res.push_back(vector<string>(path.begin(), path.end()));
        }
        else {
            vector<string>& childs = graph[start];
            for (int i = 0; i < childs.size(); ++i ) {
                getPath(childs[i], end, graph, path);
            }
        }
        path.pop_back();
    }
};
//-----------------------------
<file_sep>/*
There are two sorted arrays A and B of size m and n respectively. Find the median of the two sorted arrays. The overall run time complexity should be O(log (m+n)).
*/
class Solution {
public:
    // Median of two sorted arrays in O(log(m+n)), reduced to finding the
    // k-th smallest element of the merged sequence.
    double findMedianSortedArrays(int A[], int m, int B[], int n) {
        int t = m + n;
        if(!t)
            return 0.0; // both arrays empty
        if(t%2)
            return (double)findkth(A,m,B,n,t/2+1); // odd total: single middle element
        else
            return ( findkth(A,m,B,n,t/2+1) + findkth(A,m,B,n,t/2) ) / 2.0 ;
    }
    // k-th smallest (1-based) of the merged arrays. Each step discards
    // about k/2 elements that provably precede the k-th element.
    double findkth(int A[], int m, int B[], int n, int k){
        if(m>n) // keep A the shorter array so a <= m always holds
            return findkth(B,n,A,m,k);
        if(m==0)
            return B[k-1]; // only B remains
        if(k==1)
            return min(A[0],B[0]);
        int a = min(k/2,m); // take a elements from A and b from B, a + b == k
        int b = k - a;
        if(A[a-1]==B[b-1])
            return B[b-1]; // both candidates equal: that value is the k-th
        else if(A[a-1]<B[b-1])
            return findkth(A+a,m-a,B,n,k-a); // A[0..a) all precede the k-th
        else
            return findkth(A,m,B+b,n-b,k-b); // B[0..b) all precede the k-th
    }
};
<file_sep>/*
Given a 2D binary matrix filled with 0's and 1's, find the largest rectangle containing all ones and return its area.
*/
class Solution {
public:
    // Largest rectangle of '1's in a binary matrix.
    int maximalRectangle(vector<vector<char> > &matrix) {
        return maximalRectangle1(matrix);
    }
    //solution based on largestRectangleArea
    // Reduce to "largest rectangle in a histogram": area[i][j] counts the
    // consecutive '1's ending at row i in column j, so every row forms a
    // histogram whose best rectangle is computed below.
    int maximalRectangle1(vector<vector<char> > &matrix) {
        if(matrix.size()==0||matrix[0].size()==0){
            return 0;
        }
        vector<vector<int> > area(matrix.size(),vector<int>(matrix[0].size(),0) );
        // seed: 1 where the matrix has '1'
        for(int j = 0; j<matrix[0].size();++j){
            for(int i = 0; i<matrix.size();++i){
                area[i][j]=(matrix[i][j] == '1');
            }
        }
        // accumulate vertical runs of 1s down each column
        for(int j = 0; j<matrix[0].size();++j){
            for(int i = 1; i<matrix.size();++i){
                if(area[i][j]){
                    area[i][j] = area[i-1][j] + 1;
                }
            }
        }
        int max = 0;
        for(int i = 0; i<area.size();++i){
            max = std::max(max,largestRectangleArea(area[i]));
        }
        return max;
    }
    // Largest rectangle under a histogram. left[i] / right[i] count how many
    // neighboring bars on each side are at least height[i], found with a
    // monotonically increasing stack of indices; the bar at i then spans
    // left[i] + 1 + right[i] columns at height height[i].
    int largestRectangleArea(vector<int> &height) {
        if(height.size()==0)
            return 0;
        if(height.size()==1){
            return height[0];
        }
        vector<int> left(height.size(),0);
        stack<int> indexes;
        indexes.push(0);
        for(int i = 1 ; i < left.size(); ++i){
            // pop bars not shorter than the current one
            while(indexes.empty()==false && height[ indexes.top() ] >= height[i] ){
                indexes.pop();
            }
            if(indexes.empty()){
                left[i] = i; // everything to the left is at least as tall
            }
            else{
                left[i] = i - indexes.top() - 1 ;
            }
            indexes.push(i);
        }
        // mirror pass for the right-hand extents
        vector<int> right(height.size(),0);
        indexes=stack<int>();
        indexes.push(height.size()-1);
        for(int i = right.size()-2; i >= 0 ; --i){
            while(indexes.empty()==false && height[ indexes.top() ] >= height[i] ){
                indexes.pop();
            }
            if(indexes.empty()){
                right[i] = right.size() - i - 1;
            }
            else{
                right[i] = indexes.top() - i - 1 ;
            }
            indexes.push(i);
        }
        int max = 0;
        for(int i = 0 ; i < height.size(); ++i){
            max = std::max(max, height[i]*(left[i]+1+right[i]) );
        }
        return max;
    }
};<file_sep>/*
Given a string S and a string T, count the number of distinct subsequences of T in S.
A subsequence of a string is a new string which is formed from the original string by deleting some (can be none) of the characters without disturbing the relative positions of the remaining characters. (ie, "ACE" is a subsequence of "ABCDE" while "AEC" is not).
Here is an example:
S = "rabbbit", T = "rabbit"
Return 3.
*/
class Solution {
public:
    // Count distinct occurrences of T as a subsequence of S.
    // dp[i][j] = number of ways T[0..j) appears as a subsequence of S[0..i).
    //
    // Fix: the base-case loop now runs to i <= S.size(); the original
    // stopped at i < S.size(), leaving dp[S.size()][0] at 0, so an empty T
    // incorrectly yielded 0 instead of 1.
    int numDistinct(string S, string T) {
        vector<vector<int> > dp(S.size()+1,vector<int>(T.size()+1,0) );
        for(size_t i = 0; i <= S.size(); ++i){
            dp[i][0]=1; // the empty T matches any prefix of S exactly once
        }
        for(size_t i = 1 ; i <= S.size() ; ++i ){
            for(size_t j = 1; j <= T.size(); ++j){
                int num = dp[i-1][j]; // ways that skip S[i-1]
                if(S[i-1]==T[j-1]){
                    num += dp[i-1][j-1]; // ways that use S[i-1] to match T[j-1]
                }
                dp[i][j] = num;
            }
        }
        return dp[S.size()][T.size()];
    }
};<file_sep>/*
Sort a linked list using insertion sort.
*/
/**
* Definition for singly-linked list.
* struct ListNode {
* int val;
* ListNode *next;
* ListNode(int x) : val(x), next(NULL) {}
* };
*/
class Solution {
public:
    // Insertion sort on a singly linked list; returns the new head.
    // Fix: the original recursed once per node, so a long list overflowed
    // the call stack; this version iterates instead. The resulting value
    // order is identical (ascending).
    ListNode *insertionSortList(ListNode *head) {
        ListNode *sorted = NULL; // head of the already-sorted portion
        while (head) {
            ListNode *next = head->next; // detach before relinking
            sorted = insert(head, sorted);
            head = next;
        }
        return sorted;
    }
    // Insert newNode into the sorted list `head`; returns the new head.
    ListNode* insert(ListNode* newNode, ListNode* head){
        ListNode dummy(0); // sentinel so head insertion needs no special case
        dummy.next = head;
        ListNode* pre = &dummy;
        // advance past nodes strictly smaller than the new value
        while (pre->next && newNode->val > pre->next->val)
            pre = pre->next;
        newNode->next = pre->next;
        pre->next = newNode;
        return dummy.next;
    }
};<file_sep>/*
Two elements of a binary search tree (BST) are swapped by mistake.
Recover the tree without changing its structure.
Note:
A solution using O(n) space is pretty straight forward. Could you devise a constant space solution?
confused what "{1,#,2,3}" means? > read more on how binary tree is serialized on OJ.
OJ's Binary Tree Serialization:
The serialization of a binary tree follows a level order traversal, where '#' signifies a path terminator where no node exists below.
Here's an example:
1
/ \
2 3
/
4
\
5
The above binary tree is serialized as "{1,2,3,#,#,4,#,#,5}"
*/
/**
* Definition for binary tree
* struct TreeNode {
* int val;
* TreeNode *left;
* TreeNode *right;
* TreeNode(int x) : val(x), left(NULL), right(NULL) {}
* };
*/
class Solution {
public:
    TreeNode* first = NULL;   // first out-of-order node (the larger value)
    TreeNode* second = NULL;  // second out-of-order node (the smaller value)
    TreeNode* preNode = NULL; // previous node visited by the in-order walk
    // Restore a BST in which exactly two nodes' values were swapped, by
    // swapping them back. O(1) extra space apart from the recursion stack.
    // NOTE(review): assumes such a pair exists; on an already-valid BST
    // `first` stays NULL and the final swap dereferences NULL — confirm the
    // input contract.
    void recoverTree(TreeNode *root) {
        if(!root||(!root->left&&!root->right))
            return;
        inorder(root);
        swap(first->val,second->val);
    }
    //inorder traversal find first and second
    // An in-order walk of a valid BST is ascending; every descent
    // (val < previous val) exposes a misplaced node. The first descent's
    // predecessor is `first`; the last descent's node is `second`.
    void inorder(TreeNode *root) {
        if(!root)
            return;
        inorder(root->left);
        if(preNode&&root->val<preNode->val){
            if(!first){
                first = preNode;
            }
            second = root;
        }
        preNode = root;
        inorder(root->right);
    }
};
};<file_sep>/*
Suppose a sorted array is rotated at some pivot unknown to you beforehand.
(i.e., 0 1 2 4 5 6 7 might become 4 5 6 7 0 1 2).
You are given a target value to search. If found in the array return its index, otherwise return -1.
You may assume no duplicate exists in the array.
*/
class Solution {
public:
    // Binary search in a rotated sorted array with no duplicates.
    // Returns the index of target, or -1 when absent. At every step at
    // least one half of [lo, hi] is sorted; test whether target lies in
    // the sorted half and recurse into the appropriate side.
    int search(int A[], int n, int target) {
        int lo = 0;
        int hi = n - 1;
        while (lo <= hi) {
            int mid = lo + (hi - lo) / 2;
            if (A[mid] == target)
                return mid;
            if (A[mid] >= A[lo]) {
                // left half [lo..mid] is sorted
                if (A[lo] <= target && target <= A[mid])
                    hi = mid - 1;
                else
                    lo = mid + 1;
            } else {
                // right half [mid..hi] is sorted
                if (A[mid] <= target && target <= A[hi])
                    lo = mid + 1;
                else
                    hi = mid - 1;
            }
        }
        return -1;
    }
};<file_sep>/*
Implement int sqrt(int x).
Compute and return the square root of x.
*/
class Solution {
public:
    // Returns floor(sqrt(x)) for a non-negative int.
    // Binary search for the largest value whose square does not exceed x;
    // intermediate squares use long long so m*m cannot overflow.
    int sqrt(int x) {
        if (x < 2)
            return x;  // sqrt(0)=0, sqrt(1)=1
        long long lo = 1;
        long long hi = (long long)x / 2 + 1;  // floor(sqrt(x)) <= x/2 for x >= 2
        long long best = 1;
        while (lo <= hi) {
            long long mid = lo + (hi - lo) / 2;
            if (mid * mid <= x) {
                best = mid;     // mid is feasible; try larger
                lo = mid + 1;
            } else {
                hi = mid - 1;   // mid overshoots
            }
        }
        return (int)best;
    }
};
//how about double sqrt(double x)<file_sep>/*
Given an array with n objects colored red, white or blue, sort them so that objects of the same color are adjacent, with the colors in the order red, white and blue.
Here, we will use the integers 0, 1, and 2 to represent the color red, white, and blue respectively.
Note:
You are not supposed to use the library's sort function for this problem.
click to show follow up.
Follow up:
A rather straight forward solution is a two-pass algorithm using counting sort.
First, iterate the array counting number of 0's, 1's, and 2's, then overwrite array with total number of 0's, then 1's and followed by 2's.
Could you come up with an one-pass algorithm using only constant space?
*/
class Solution {
public:
    // One-pass Dutch-national-flag partition of an array of 0/1/2 values.
    // Invariant: A[0..lo) are 0s, A[lo..mid) are 1s, A(hi..n-1] are 2s,
    // and A[mid..hi] is the unexamined region.
    void sortColors(int A[], int n) {
        int lo = 0;
        int mid = 0;
        int hi = n - 1;
        while (mid <= hi) {
            switch (A[mid]) {
                case 0:
                    // Move 0 into the left region; the swapped-in value is
                    // a known 1 (or mid==lo), so mid may advance.
                    swap(A[lo++], A[mid++]);
                    break;
                case 1:
                    ++mid;  // already in place
                    break;
                default:
                    // Move 2 into the right region; the swapped-in value is
                    // unexamined, so mid stays put.
                    swap(A[mid], A[hi--]);
                    break;
            }
        }
    }
};
Given a binary tree, return the level order traversal of its nodes' values. (ie, from left to right, level by level).
For example:
Given binary tree {3,9,20,#,#,15,7},
3
/ \
9 20
/ \
15 7
return its level order traversal as:
[
[3],
[9,20],
[15,7]
]
confused what "{1,#,2,3}" means? > read more on how binary tree is serialized on OJ.
OJ's Binary Tree Serialization:
The serialization of a binary tree follows a level order traversal, where '#' signifies a path terminator where no node exists below.
Here's an example:
1
/ \
2 3
/
4
\
5
The above binary tree is serialized as "{1,2,3,#,#,4,#,#,5}".
*/
/**
* Definition for binary tree
* struct TreeNode {
* int val;
* TreeNode *left;
* TreeNode *right;
* TreeNode(int x) : val(x), left(NULL), right(NULL) {}
* };
*/
class Solution {
public:
    // Recursive pre-order walk that appends each node's value to the row
    // matching its 1-based depth. Rows are created lazily the first time a
    // depth is reached (left subtree first, so order within a row is
    // left-to-right).
    void levelOrder(TreeNode *root, vector<vector<int> >& vv, int depth){
        if (root == NULL)
            return;
        // First node seen at this depth: open a new row.
        if ((int)vv.size() < depth)
            vv.resize(depth);
        vv[depth - 1].push_back(root->val);
        levelOrder(root->left, vv, depth + 1);
        levelOrder(root->right, vv, depth + 1);
    }
    // Returns the node values level by level, left to right.
    vector<vector<int> > levelOrder(TreeNode *root) {
        vector<vector<int> > rows;
        levelOrder(root, rows, 1);
        return rows;
    }
    // An iterative BFS with an explicit queue works equally well here,
    // similar to (but simpler than) Binary Tree Zigzag Level Order Traversal.
};
<file_sep>/*
Given an array S of n integers, find three integers in S such that the sum is closest to a given number, target. Return the sum of the three integers. You may assume that each input would have exactly one solution.
For example, given array S = {-1 2 1 -4}, and target = 1.
The sum that is closest to the target is 2. (-1 + 2 + 1 = 2).
*/
class Solution {
public:
    // Returns the sum of the three elements of num whose total is closest
    // to target. Sort first, then for each anchor element close the
    // two-pointer window from both ends; moving the pointer on the side
    // that overshoots/undershoots target. O(n^2) time, O(1) extra space.
    // Note: sorts num in place (same as the original).
    int threeSumClosest(vector<int> &num, int target) {
        sort(num.begin(), num.end());
        int n = num.size();
        int best = INT_MAX;  // sentinel: no candidate recorded yet
        for (int a = 0; a + 2 < n; ++a) {
            int lo = a + 1;
            int hi = n - 1;
            while (lo < hi) {
                int total = num[a] + num[lo] + num[hi];
                if (total == target)
                    return total;  // cannot do better than exact
                if (best == INT_MAX || abs(total - target) < abs(best - target))
                    best = total;
                if (total > target)
                    --hi;
                else
                    ++lo;
            }
        }
        return best;
    }
};
Determine whether an integer is a palindrome. Do this without extra space.
*/
class Solution {
public:
    // Returns true iff x reads the same forwards and backwards in base 10.
    // O(1) space, pure integer arithmetic. The previous version used
    // floating-point pow() for digit extraction, which is slower and a
    // fragile way to do integer math; here we reverse only the lower half
    // of the digits and compare it to the upper half, so `rev` can never
    // overflow an int.
    bool isPalindrome(int x) {
        // Negatives are never palindromes ("-121" vs "121-"); a positive
        // number ending in 0 (other than 0 itself) cannot be one either,
        // since it would need a leading zero.
        if (x < 0 || (x % 10 == 0 && x != 0))
            return false;
        int rev = 0;
        // Peel digits from the low end onto rev until it holds at least
        // half of them (loop exits once rev >= x).
        while (x > rev) {
            rev = rev * 10 + x % 10;
            x /= 10;
        }
        // Even digit count: halves must match exactly.
        // Odd digit count: drop the middle digit with rev / 10.
        return x == rev || x == rev / 10;
    }
};
Given two binary strings, return their sum (also a binary string).
For example,
a = "11"
b = "1"
Return "100".
*/
class Solution {
public:
    // Adds two binary-digit strings and returns their sum as a binary
    // string. Fixes a syntax error in the original (unbalanced parenthesis
    // in `if( (ai^bi^carry)` — the code did not compile) and replaces the
    // pad/reverse/pad bookkeeping with a single right-to-left pass that
    // carries a ripple carry.
    string addBinary(string a, string b) {
        string sum;                       // digits, least-significant first
        int i = (int)a.size() - 1;
        int j = (int)b.size() - 1;
        int carry = 0;
        // Consume both strings from the least-significant end; keep going
        // while either has digits left or a carry remains.
        while (i >= 0 || j >= 0 || carry) {
            int bit = carry;
            if (i >= 0) bit += a[i--] - '0';
            if (j >= 0) bit += b[j--] - '0';
            sum.push_back((char)('0' + (bit & 1)));  // low bit is the digit
            carry = bit >> 1;                        // high bit ripples on
        }
        // Digits were produced in reverse order.
        reverse(sum.begin(), sum.end());
        return sum;
    }
};
Given a sorted array and a target value, return the index if the target is found. If not, return the index where it would be if it were inserted in order.
You may assume no duplicates in the array.
Here are few examples.
[1,3,5,6], 5 → 2
[1,3,5,6], 2 → 1
[1,3,5,6], 7 → 4
[1,3,5,6], 0 → 0
*/
class Solution {
public:
    // Classic lower-bound search on a sorted, duplicate-free vector:
    // returns the index of target if present, otherwise the index of the
    // first element greater than target (i.e. the insertion point).
    int lower_bound(const vector<int>& v, int target){
        int l = 0;
        int h = v.size();
        while(l<h){
            int m = l + (h-l)/2;
            if(v[m]==target){
                return m;
            }
            else if(v[m]>target){
                h = m ;
            }
            else{
                l = m + 1;
            }
        }
        return h;
    }
    // Returns the index of target in sorted A, or the index where it would
    // be inserted to keep A sorted. Searches A directly: the previous
    // version copied A into a vector first, which turned the O(log n)
    // binary search into an O(n) operation.
    int searchInsert(int A[], int n, int target) {
        int l = 0;
        int h = n;
        while (l < h) {
            int m = l + (h - l) / 2;
            if (A[m] == target)
                return m;
            else if (A[m] > target)
                h = m;          // insertion point is at or left of m
            else
                l = m + 1;      // insertion point is right of m
        }
        return h;
    }
};
Given a binary tree, find its minimum depth.
The minimum depth is the number of nodes along the shortest path from the root node down to the nearest leaf node.
*/
/**
* Definition for binary tree
* struct TreeNode {
* int val;
* TreeNode *left;
* TreeNode *right;
* TreeNode(int x) : val(x), left(NULL), right(NULL) {}
* };
*/
//leaf is not root itself
class Solution {
public:
//BFS is better than DFS
int minDepth(TreeNode *root) {
if(root==NULL)
return 0;
queue<TreeNode*> q;
q.push(root);
int size = 1;
int level = 1;
while(!q.empty()){
TreeNode* n = q.front();
q.pop();
--size;
//leaf
if(n->left==NULL&&n->right==NULL){
return level;
}
if(n->left)
q.push(n->left);
if(n->right)
q.push(n->right);
if(0==size){
size = q.size();
++level;
}
}
}
}; | a0dd57763aba3f373f03ea6107dccca8926b5a86 | [
"C++"
] | 98 | C++ | zhanying26252625/leetcode | 1718544de94a1e42c632358d4a06ef69bf45953e | eed318d0b0b1d193e30c39d1d9d826cc1c00810b |
refs/heads/master | <repo_name>mattbenton/hvm<file_sep>/README.md
# hvm
Haxe Version Manager
<file_sep>/hvm
#!/bin/bash
echo HVM Test!
| 4a38939ed6470b207137e3587ae61f61adfad3c3 | [
"Markdown",
"Shell"
] | 2 | Markdown | mattbenton/hvm | 7f58fc07d2d087d5241e67dc5e1c032d06e095eb | 0d9696e06307b33ca63c8f009b7219c0d480b88e |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.